List of usage examples for java.io FileWriter append
@Override public Writer append(CharSequence csq) throws IOException
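Before the project examples below, here is a minimal sketch of the typical pattern (the file name and the text written are placeholders, not taken from any of the examples): append(CharSequence) returns the Writer itself, so calls can be chained, and opening the FileWriter with the two-argument constructor keeps any existing file content.

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class FileWriterAppendSketch {
    public static void main(String[] args) {
        // "output.txt" is a placeholder path; the boolean argument opens the
        // file in append mode so existing content is preserved.
        try (Writer writer = new FileWriter("output.txt", true)) {
            // append() returns the writer, so calls can be chained
            writer.append("first line").append(System.lineSeparator())
                  .append("second line").append(System.lineSeparator());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

Note that append() only adds to the stream being written; whether the target file itself is appended to or overwritten is decided by the FileWriter constructor, as several of the examples below illustrate.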
From source file:ait.ffma.service.preservation.riskmanagement.TestLODFreebaseRetrieve.java
public void searchFileFormatInformationCsv() throws MalformedURLException, URISyntaxException {
    log.info("This test evaluates file format information from linked open data repositories and stores results in CSV file.");
    try {
        initDataStoreDao();
        FileWriter writer = new FileWriter(CSV_FILE);
        String cFileFormatDescriptionRows = "";
        String softwareDescriptionCsv = "";
        vendorDescriptionCsv = "";
        if (outputMode.equals(LODConstants.LODOutputMode.Columns)) {
            writer.append("FILE FORMAT DESCRIPTION\n");
            writer.append("FORMAT NAME;CURRENT VERSION RELEASE DATE;SOFTWARE COUNT;SOFTWARE;"
                    + "CURRENT FORMAT VERSION;FORMAT LICENSE;LIMITATIONS;PUID;FORMAT HOMEPAGE;MIME TYPE;"
                    + "FORMAT GENRE;FORMAT CREATOR;OPEN FORMAT;FILE EXTENSIONS;VENDORS;STANDARDS\n");
        }
        String res = "";
        LODUtils.initCalculationModel();
        // initFileFormatList();
        initFileFormatListFromAllRepositories();
        int counter = 0; // limit to test
        Iterator<DipFormatId> fileFormatIter = dipFormatIdList.iterator();
        while (fileFormatIter.hasNext()) {
            DipFormatId dipFormatId = fileFormatIter.next();
            String currentFileFormat = dipFormatId.getExtension();
            // Iterator<String> fileFormatIter = fileFormatList.iterator();
            // while (fileFormatIter.hasNext()) {
            String softwareCsv = "";
            vendorListCsv = "";
            // String currentFileFormat = fileFormatIter.next();
            List<String> fields = Arrays.asList(LODConstants.FB_READ_BY, LODConstants.FB_NAME, LODConstants.FB_ID);
            List<List<String>> resultList = LODUtils.searchInFreebaseExt(
                    LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, currentFileFormat,
                    LODConstants.FB_EXTENSION, fields);
            /**
             * print out
             */
            Iterator<List<String>> resultIter = resultList.iterator();
            res = res + "\n\n" + "LOD PROPERTY: " + "LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, " + "SEARCH VALUE: "
                    + currentFileFormat + ", " + "LIST SIZE: " + resultList.size() + "\n";
            List<String> softwareNames = new ArrayList<String>();
            List<String> softwareIDs = new ArrayList<String>();
            while (resultIter.hasNext()) {
                List<String> subList = resultIter.next();
                Iterator<String> subIter = subList.iterator();
                res = res + "ENTRY: ";
                String tmpSoftware = "";
                String tmpId = "";
                int index = 1;
                while (subIter.hasNext()) {
                    String str = subIter.next();
                    if (fields.get(index).equals(LODConstants.FB_NAME)) {
                        tmpSoftware = str;
                        res = res + fields.get(index) + ": " + str + ", ";
                        softwareCsv = softwareCsv + str + ", ";
                    }
                    if (fields.get(index).equals(LODConstants.FB_ID)) {
                        tmpId = str;
                    }
                    index++;
                }
                if (outputMode.equals(LODConstants.LODOutputMode.Columns)) {
                    // add software name and ID to LODSoftware object
                    softwareIDs.add(tmpId);
                    softwareNames.add(tmpSoftware);
                    softwareDescriptionCsv = softwareDescriptionCsv
                            + generateSoftwareDescriptionColumn(tmpSoftware, tmpId, currentFileFormat);
                } else {
                    softwareDescriptionCsv = softwareDescriptionCsv + generateSoftwareDescription(tmpSoftware, tmpId);
                }
                res = res + "\n";
            }
            List<String> creationDateList = LODUtils.searchInFreebase(
                    LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, currentFileFormat,
                    LODConstants.FB_EXTENSION, LODConstants.FB_FORMAT_CREATION_DATE, "");
            String creationDateStr = "";
            Iterator<String> creationDateIter = creationDateList.iterator();
            while (creationDateIter.hasNext()) {
                creationDateStr = creationDateStr + creationDateIter.next() + ", ";
            }
            List<String> nameList = LODUtils.searchInFreebase(LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID,
                    currentFileFormat, LODConstants.FB_EXTENSION, LODConstants.FB_NAME, "");
            String nameStr = "";
            Iterator<String> nameIter = nameList.iterator();
            while (nameIter.hasNext()) {
                nameStr = nameStr + nameIter.next() + ", ";
            }
            List<String> mimeTypeList = LODUtils.searchInFreebase(
                    LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, currentFileFormat,
                    LODConstants.FB_EXTENSION, LODConstants.FB_MIME_TYPE, LODConstants.FB_ID);
            String mimeTypeStr = "";
            Iterator<String> mimeTypeIter = mimeTypeList.iterator();
            while (mimeTypeIter.hasNext()) {
                mimeTypeStr = mimeTypeStr + mimeTypeIter.next() + ", ";
            }
            List<String> repositoryIDs = new ArrayList<String>();
            List<String> repositoryIdList = LODUtils.searchInFreebase(
                    LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, currentFileFormat,
                    LODConstants.FB_EXTENSION, LODConstants.FB_ID, LODConstants.FB_ID);
            Iterator<String> repositoryIdIter = repositoryIdList.iterator();
            while (repositoryIdIter.hasNext()) {
                repositoryIDs.add(repositoryIdIter.next());
            }
            List<String> guidList = LODUtils.searchInFreebase(LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID,
                    currentFileFormat, LODConstants.FB_EXTENSION, LODConstants.FB_GUID, LODConstants.FB_ID);
            List<String> descriptions = new ArrayList<String>();
            String guidStr = "";
            Iterator<String> guidIter = guidList.iterator();
            while (guidIter.hasNext()) {
                guidStr = guidIter.next();
                String descriptionStr = getDescription(guidStr);
                descriptions.add(descriptionStr);
                break;
            }
            List<String> genreList = LODUtils.searchInFreebase(LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID,
                    currentFileFormat, LODConstants.FB_EXTENSION, LODConstants.FB_GENRE, LODConstants.FB_NAME);
            String genreStr = "";
            Iterator<String> genreIter = genreList.iterator();
            while (genreIter.hasNext()) {
                genreStr = genreStr + genreIter.next() + ", ";
            }
            List<String> creatorList = LODUtils.searchInFreebase(
                    LODConstants.LOD_SOFTWARE_BY_FORMAT_PROPERTY_ID, currentFileFormat,
                    LODConstants.FB_EXTENSION, LODConstants.FB_FORMAT_CREATOR, "");
            String creatorStr = "";
            Iterator<String> creatorIter = creatorList.iterator();
            while (creatorIter.hasNext()) {
                creatorStr = creatorStr + creatorIter.next() + ", ";
                creatorStr = creatorStr.replaceAll("&", "&"); // special case
            }
            if (outputMode.equals(LODConstants.LODOutputMode.Columns)) {
                String vendorCsv = "";
                vendorCsv = mapFileFormatToVendor.get(currentFileFormat);
                cFileFormatDescriptionRows = nameStr + ";" + creationDateStr + ";" + resultList.size() + ";"
                        + softwareCsv + ";" + ";" + ";" + ";" + ";" + ";" + mimeTypeStr + ";" + genreStr + ";"
                        + creatorStr + ";" + ";" + currentFileFormat + ";" + vendorCsv + ";" + ";" + "\n";
                LODFormat lodFormat = new LODFormat();
                lodFormat.setFormatName(nameStr);
                lodFormat.setCurrentVersionReleaseDate(creationDateStr);
                lodFormat.setSoftwareCount(resultList.size());
                lodFormat.setSoftware(softwareCsv);
                lodFormat.setMimeType(mimeTypeStr);
                lodFormat.setFormatGenre(genreStr);
                lodFormat.setFormatCreator(creatorStr);
                lodFormat.setFileExtensions(currentFileFormat);
                lodFormat.setVendors(vendorCsv);
                if (repositoryIDs.size() > 0) {
                    lodFormat.setRepositoryId(repositoryIDs.toArray(new String[] {}));
                }
                lodFormat.setRepository(LODConstants.FREEBASE);
                lodFormat.setDescription(descriptions.toArray(new String[] {}));
                lodFormat.setSoftwareName(softwareNames.toArray(new String[] {}));
                lodFormat.setSoftwareId(softwareIDs.toArray(new String[] {}));
                LODFormat checkLodFormat = new LODFormat();
                checkLodFormat.setRepository(LODConstants.FREEBASE);
                checkLodFormat.setFormatName(nameStr);
                updateObject(checkLodFormat, lodFormat);
                writer.append(cFileFormatDescriptionRows);
            } else {
                writer.append("FILE FORMAT DESCRIPTION\n");
                writer.append("\n\n ****** FILE FORMAT DESCRIPTION ******\n\n");
                writer.append("FORMAT NAME: " + nameStr + "\n");
                writer.append("CURRENT VERSION RELEASE DATE: " + creationDateStr + "\n");
                writer.append("SOFTWARE COUNT: " + resultList.size() + "\n");
                writer.append("SOFTWARE: " + softwareCsv + "\n");
                writer.append("CURRENT FORMAT VERSION: \n");
                writer.append("FORMAT LICENSE: \n");
                writer.append("LIMITATIONS: \n");
                writer.append("PUID: \n");
                writer.append("FORMAT HOMEPAGE: \n");
                writer.append("MIME TYPE: " + mimeTypeStr + "\n");
                writer.append("FORMAT GENRE: " + genreStr + "\n");
                writer.append("FORMAT CREATOR: " + creatorStr + "\n");
                writer.append("OPEN FORMAT: \n");
                writer.append("FILE EXTENSIONS: " + currentFileFormat + " \n");
                writer.append("VENDORS: " + vendorListCsv + "\n");
                writer.append("STANDARDS: \n");
                writer.append(softwareDescriptionCsv);
            }
            log.info("*** CURRENT COUNT *** " + counter);
            counter++;
            // if (counter > 2) break; // limit to test
        }
        if (outputMode.equals(LODConstants.LODOutputMode.Columns)) {
            writer.append("\nSOFTWARE DESCRIPTION\n");
            writer.append("SOFTWARE NAME;SOFTWARE LICENSE;SOFTWARE HOMEPAGE;GENRE;"
                    + "OPERATING SYSTEM;PROTOCOLS;PROGRAMMING LANGUAGE;SOFTWARE LATEST VERSION;"
                    + "SOFTWARE RELEASE DATE\n");
            writer.append(softwareDescriptionCsv);
            writer.append("\nVENDOR DESCRIPTION\n");
            writer.append("ORGANISATION NAME;NUMBER OF EMPLOYEES;BUSINESS STATUS;"
                    + "CURRENT Ffma;STOCK ISSUES;RANKED LIST;COUNTRY;FOUNDATION DATE;"
                    + "HOMEPAGE\n");
            writer.append(vendorDescriptionCsv);
        }
        log.info("************ " + res);
        writer.flush();
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:snpBasicStatcPedFiles.PruebaLeerArchivo.java
public String procesar(String rutaArchivoPed) {
    long time_Start_escritura = System.currentTimeMillis();
    String nombreArchivo = new File(rutaArchivoPed).getName();
    // Path used when running on Linux:
    //String ruta = String.format( "/home/santiago/snpJsonArray_%s.json", nombreArchivo.replace(".ped", "") );
    // Path used when running on Windows. On Windows the temporary files managed by
    // Apache get a .tmp extension; on Linux they have no extension.
    String ruta = String.format("D:\\snpJsonArray_%s.json", nombreArchivo.replace(".tmp", ""));
    FileWriter writer;
    try {
        writer = new FileWriter(ruta);
        writer.write("[");
        writer.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    long time_start, time_end;
    time_start = System.currentTimeMillis();
    leer_Archivo(rutaArchivoPed);
    time_end = System.currentTimeMillis();
    System.out.println("Tiempo de Procesar ARchivo " + (time_end - time_start) + " milliseconds");
    ProcesamientoPED ped;
    ProcesamientoPED[] arregloProcesamientoPed = new ProcesamientoPED[arregloParticiones.size()];
    Particiones p;
    System.out.println("tamano arreglo particiones " + arregloParticiones.size());
    for (int i = 0; i < arregloParticiones.size(); i++) {
        p = arregloParticiones.get(i);
        ped = new ProcesamientoPED(snpArray, p.inicioP, p.finalP, ruta, snpJsonArray, (fileDetail.length - 6) - 1);
        arregloProcesamientoPed[i] = ped;
        /*
        try {
            ped = new ProcesamientoPED(snpArray, p.inicioP, p.finalP, cadena);
            ped.start();
            ped.join();
        } catch (InterruptedException ex) {
            Logger.getLogger(PruebaLeerArchivo.class.getName()).log(Level.SEVERE, null, ex);
        }
        */
    }
    for (ProcesamientoPED procesamientoPed : arregloProcesamientoPed) {
        procesamientoPed.start();
        try {
            procesamientoPed.join();
        } catch (InterruptedException ex) {
            Logger.getLogger(PruebaLeerArchivo.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    try {
        writer = new FileWriter(ruta, true);
        writer.append("]");
        writer.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    long time_End_escritura = System.currentTimeMillis();
    System.out.println("---------------------------------IMPORTANTEEEE: TTiempo Finalizacion Escritura: "
            + (time_End_escritura - time_Start_escritura) + " ms ---------------------------------");
    arregloProcesamientoPed = null;
    return ruta;
    //return snpJsonArray;
}
From source file:org.nuxeo.launcher.config.ConfigurationGenerator.java
/**
 * Read nuxeo.conf, replace backslashes in paths and write new nuxeo.conf
 *
 * @throws ConfigurationException if any error reading or writing nuxeo.conf
 * @since 5.4.1
 */
protected void replaceBackslashes() throws ConfigurationException {
    StringBuffer sb = new StringBuffer();
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(nuxeoConf));
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.matches(".*:\\\\.*")) {
                line = line.replaceAll("\\\\", "/");
            }
            sb.append(line + System.getProperty("line.separator"));
        }
        reader.close();
    } catch (IOException e) {
        throw new ConfigurationException("Error reading " + nuxeoConf, e);
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                throw new ConfigurationException(e);
            }
        }
    }
    FileWriter writer = null;
    try {
        writer = new FileWriter(nuxeoConf, false);
        // Copy back file content
        writer.append(sb.toString());
    } catch (IOException e) {
        throw new ConfigurationException("Error writing in " + nuxeoConf, e);
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                throw new ConfigurationException(e);
            }
        }
    }
}
From source file:au.org.ala.layers.intersect.Grid.java
public void writeHeader(String newfilename, double xmin, double ymin, double xmax, double ymax, double xres,
        double yres, int nrows, int ncols, double minvalue, double maxvalue, String datatype, String nodata) {
    FileWriter fw = null;
    try {
        fw = new FileWriter(newfilename + ".grd");
        fw.append("[General]");
        fw.append("\r\n").append("Title=").append(newfilename);
        fw.append("\r\n").append("[GeoReference]");
        fw.append("\r\n").append("Projection=GEOGRAPHIC");
        fw.append("\r\n").append("Datum=WGS84");
        fw.append("\r\n").append("Mapunits=DEGREES");
        fw.append("\r\n").append("Columns=").append(String.valueOf(ncols));
        fw.append("\r\n").append("Rows=").append(String.valueOf(nrows));
        fw.append("\r\n").append("MinX=").append(String.format("%.2f", xmin));
        fw.append("\r\n").append("MaxX=").append(String.format("%.2f", xmax));
        fw.append("\r\n").append("MinY=").append(String.format("%.2f", ymin));
        fw.append("\r\n").append("MaxY=").append(String.format("%.2f", ymax));
        fw.append("\r\n").append("ResolutionX=").append(String.valueOf(xres));
        fw.append("\r\n").append("ResolutionY=").append(String.valueOf(yres));
        fw.append("\r\n").append("[Data]");
        fw.append("\r\n").append("DataType=" + datatype);
        fw.append("\r\n").append("MinValue=").append(String.valueOf(minvalue));
        fw.append("\r\n").append("MaxValue=").append(String.valueOf(maxvalue));
        fw.append("\r\n").append("NoDataValue=").append(nodata);
        fw.append("\r\n").append("Transparent=0");
        fw.flush();
    } catch (Exception e) {
        logger.error("error writing grid file header", e);
    } finally {
        if (fw != null) {
            try {
                fw.close();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
}
From source file:self.philbrown.javaQuery.$.java
/**
 * Write a String to file, and execute functions once complete.
 * @param s the String to write to the file
 * @param path defines the save location of the file
 * @param append {@code true} to append the new String to the end of the file. {@code false} to overwrite any existing file.
 * @param async {@code true} if the operation should be performed asynchronously. Otherwise, {@code false}.
 * @param success Function to invoke on a successful file-write. Parameters received will be:
 * <ol>
 * <li>the String to write
 * <li>the File that was written (to)
 * </ol>
 * @param error Function to invoke on a file I/O error. Parameters received will be:
 * <ol>
 * <li>the String to write
 * <li>the String reason
 * </ol>
 */
@SuppressWarnings("unchecked")
public static void write(final String s, final String fileName, boolean append, final String notifySuccess,
        String notifyError) {
    File file = new File(fileName);
    if (!file.canWrite()) {
        if (notifyError != null)
            EventCenter.trigger(null, notifyError, (Map<String, Object>) $.map($.entry("data", s.getBytes()),
                    $.entry("message", "You do not have file write privelages")), null);
        return;
    }
    try {
        FileWriter writer = new FileWriter(file);
        if (append) {
            writer.append(s);
        } else {
            writer.write(s);
        }
        if (notifySuccess != null)
            EventCenter.trigger(null, notifySuccess, (Map<String, Object>) $.map($.entry("data", s.getBytes()),
                    $.entry("message", "Success")), null);
    } catch (Throwable t) {
        if (notifyError != null)
            EventCenter.trigger(null, notifyError, (Map<String, Object>) $.map($.entry("data", s.getBytes()),
                    $.entry("message", "IO Error")), null);
    }
}
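A caveat on the example above: new FileWriter(file) always truncates the file, so the append parameter only chooses between Writer.append(s) and Writer.write(s) on a freshly emptied file, which produce the same result for a String; the writer is also never flushed or closed. A corrected fragment (a sketch, reusing the method's own file, append and s variables) would pass the flag to the constructor instead:

try (FileWriter writer = new FileWriter(file, append)) {
    writer.append(s); // truly appends to the existing file only when the append flag is true
}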
From source file:raptor.swt.chat.ChatConsoleController.java
public void onSave() {
    if (isIgnoringActions()) {
        return;
    }
    FileDialog fd = new FileDialog(chatConsole.getShell(), SWT.SAVE);
    fd.setText(local.getString("chatConsCont5"));
    fd.setFilterPath("");
    String[] filterExt = { "*.txt", "*.*" };
    fd.setFilterExtensions(filterExt);
    final String selected = fd.open();
    if (selected != null) {
        chatConsole.getDisplay().asyncExec(new RaptorRunnable(getConnector()) {
            public void execute() {
                FileWriter writer = null;
                try {
                    writer = new FileWriter(selected);
                    writer.append(local.getString("chatConsCont6")).append(String.valueOf(new Date()))
                            .append("\n");
                    int i = 0;
                    while (i < chatConsole.getInputText().getCharCount() - 1) {
                        int endIndex = i + TEXT_CHUNK_SIZE;
                        if (endIndex >= chatConsole.getInputText().getCharCount()) {
                            endIndex = i + chatConsole.getInputText().getCharCount() - i - 1;
                        }
                        String string = chatConsole.getInputText().getText(i, endIndex);
                        writer.append(string);
                        i = endIndex;
                    }
                    writer.flush();
                } catch (Throwable t) {
                    LOG.error("Error writing file: " + selected, t);
                } finally {
                    if (writer != null) {
                        try {
                            writer.close();
                        } catch (IOException ioe) {
                        }
                    }
                }
            }
        });
    }
}
From source file:org.apache.oozie.service.TestZKXLogStreamingService.java
public void testStreamingWithMultipleOozieServers_errorLog() throws Exception {
    XLogFilter.reset();
    File log4jFile = new File(getTestCaseConfDir(), "test-log4j.properties");
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    InputStream is = cl.getResourceAsStream("test-no-dash-log4j.properties");
    Properties log4jProps = new Properties();
    log4jProps.load(is);
    // prevent conflicts with other tests by changing the log file location
    log4jProps.setProperty("log4j.appender.oozie.File", getTestCaseDir() + "/oozie.log");
    log4jProps.setProperty("log4j.appender.oozieError.File", getTestCaseDir() + "/oozie-error.log");
    log4jProps.store(new FileOutputStream(log4jFile), "");
    setSystemProperty(XLogService.LOG4J_FILE, log4jFile.getName());
    Services.get().get(XLogService.class).init(Services.get());
    File logFile = new File(Services.get().get(XLogService.class).getOozieErrorLogPath(),
            Services.get().get(XLogService.class).getOozieErrorLogName());
    logFile.getParentFile().mkdirs();
    FileWriter logWriter = new FileWriter(logFile);
    // local logs
    StringBuffer bf = new StringBuffer();
    bf.append("2014-02-06 00:26:56,126 WARN CoordActionInputCheckXCommand:545 [pool-2-thread-26] - USER[-] GROUP[-] "
            + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
            + "checking for the file ~:8020/user/purushah/examples/input-data/rawLogs/2010/01/01/01/00/_SUCCESS\n")
            .append("2014-02-06 00:26:56,150 WARN CoordActionInputCheckXCommand:539 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
                    + "[0000003-140205233038063-oozie-oozi-C@1]::ActionInputCheck::File::8020/user/purushah/examples/input-data/"
                    + "rawLogs/2010/01/01/01/00/_SUCCESS, Exists? :false" + "Action updated in DB! _L1_")
            .append("\n")
            .append("2014-02-06 00:27:56,126 WARN CoordActionInputCheckXCommand:545 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@2] "
                    + "checking for the file ~:8020/user/purushah/examples/input-data/rawLogs/2010/01/01/01/00/_SUCCESS\n")
            .append("2014-02-06 00:27:56,150 WARN CoordActionInputCheckXCommand:539 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@2] "
                    + "[0000003-140205233038063-oozie-oozi-C@2]::ActionInputCheck::File::8020/user/purushah/examples/input-data/"
                    + "rawLogs/2010/01/01/01/00/_SUCCESS, Exists? :false" + "Action updated in DB! _L2_")
            .append("\n");
    logWriter.append(bf);
    logWriter.close();
    XLogFilter.reset();
    XLogFilter.defineParameter("USER");
    XLogFilter.defineParameter("GROUP");
    XLogFilter.defineParameter("TOKEN");
    XLogFilter.defineParameter("APP");
    XLogFilter.defineParameter("JOB");
    XLogFilter.defineParameter("ACTION");
    XLogFilter xf = new XLogFilter();
    xf.setParameter("USER", ".*");
    xf.setParameter("GROUP", ".*");
    xf.setParameter("TOKEN", ".*");
    xf.setParameter("APP", ".*");
    xf.setParameter("JOB", "0000003-140205233038063-oozie-oozi-C");
    xf.setParameter(DagXLogInfoService.ACTION, "0000003-140205233038063-oozie-oozi-C@1");
    String out = doStreamErrorLog(xf);
    String[] outArr = out.split("\n");
    assertEquals(2, outArr.length);
    assertTrue(out.contains("_L1_"));
    assertFalse(out.contains("_L2_"));
    // We'll use a DummyZKOozie to create an entry in ZK and then set its
    // url to an (unrelated) servlet that will simply return
    // some log messages
    DummyZKOozie dummyOozie = null;
    EmbeddedServletContainer container = new EmbeddedServletContainer("oozie");
    container.addServletEndpoint("/other-oozie-server/*", DummyLogStreamingServlet.class);
    try {
        container.start();
        dummyOozie = new DummyZKOozie("9876", container.getServletURL("/other-oozie-server/*"));
        StringBuffer newLog = new StringBuffer();
        newLog.append("2014-02-07 00:26:56,126 WARN CoordActionInputCheckXCommand:545 [pool-2-thread-26] - USER[-] GROUP[-] "
                + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
                + "checking for the file ~:8020/user/purushah/examples/input-data/rawLogs/2010/01/01/01/00/_SUCCESS\n")
                .append("2014-02-07 00:26:56,150 WARN CoordActionInputCheckXCommand:539 [pool-2-thread-26] - USER[-] GROUP[-] "
                        + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
                        + "[0000003-140205233038063-oozie-oozi-C@1]::ActionInputCheck::File::8020/user/purushah/examples/input-data/"
                        + "rawLogs/2010/01/01/01/00/_SUCCESS, Exists? :false" + "Action updated in DB! _L3_")
                .append("\n");
        DummyLogStreamingServlet.logs = newLog.toString();
        out = doStreamErrorLog(xf);
        outArr = out.split("\n");
        assertEquals(4, outArr.length);
        assertTrue(out.contains("_L1_"));
        assertTrue(out.contains("_L3_"));
        assertFalse(out.contains("_L2_"));
        container.stop();
    } finally {
        if (dummyOozie != null) {
            dummyOozie.teardown();
        }
        container.stop();
    }
}
From source file:org.apache.oozie.service.TestZKXLogStreamingService.java
public void testStreamingWithMultipleOozieServers_coordActionList() throws Exception {
    XLogFilter.reset();
    File log4jFile = new File(getTestCaseConfDir(), "test-log4j.properties");
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    InputStream is = cl.getResourceAsStream("test-no-dash-log4j.properties");
    Properties log4jProps = new Properties();
    log4jProps.load(is);
    // prevent conflicts with other tests by changing the log file location
    log4jProps.setProperty("log4j.appender.oozie.File", getTestCaseDir() + "/oozie.log");
    log4jProps.store(new FileOutputStream(log4jFile), "");
    setSystemProperty(XLogService.LOG4J_FILE, log4jFile.getName());
    Services.get().get(XLogService.class).init(Services.get());
    File logFile = new File(Services.get().get(XLogService.class).getOozieLogPath(),
            Services.get().get(XLogService.class).getOozieLogName());
    logFile.getParentFile().mkdirs();
    FileWriter logWriter = new FileWriter(logFile);
    // local logs
    StringBuffer bf = new StringBuffer();
    bf.append("2014-02-06 00:26:56,126 DEBUG CoordActionInputCheckXCommand:545 [pool-2-thread-26] - USER[-] GROUP[-] "
            + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
            + "checking for the file ~:8020/user/purushah/examples/input-data/rawLogs/2010/01/01/01/00/_SUCCESS\n")
            .append("2014-02-06 00:26:56,150 INFO CoordActionInputCheckXCommand:539 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@1] "
                    + "[0000003-140205233038063-oozie-oozi-C@1]::ActionInputCheck:: File::8020/user/purushah/examples/input-data/"
                    + "rawLogs/2010/01/01/01/00/_SUCCESS, Exists? :false" + "Action updated in DB! _L1_")
            .append("\n")
            .append("2014-02-06 00:27:56,126 DEBUG CoordActionInputCheckXCommand:545 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@2] "
                    + "checking for the file ~:8020/user/purushah/examples/input-data/rawLogs/2010/01/01/01/00/_SUCCESS\n")
            .append("2014-02-06 00:27:56,150 INFO CoordActionInputCheckXCommand:539 [pool-2-thread-26] - USER[-] GROUP[-] "
                    + "TOKEN[-] APP[-] JOB[0000003-140205233038063-oozie-oozi-C] ACTION[0000003-140205233038063-oozie-oozi-C@2] "
                    + "[0000003-140205233038063-oozie-oozi-C@2]::ActionInputCheck:: File::8020/user/purushah/examples/input-data/"
                    + "rawLogs/2010/01/01/01/00/_SUCCESS, Exists? :false" + "Action updated in DB! _L2_")
            .append("\n");
    logWriter.append(bf);
    logWriter.close();
    XLogFilter.reset();
    XLogFilter.defineParameter("USER");
    XLogFilter.defineParameter("GROUP");
    XLogFilter.defineParameter("TOKEN");
    XLogFilter.defineParameter("APP");
    XLogFilter.defineParameter("JOB");
    XLogFilter.defineParameter("ACTION");
    XLogFilter xf = new XLogFilter();
    xf.setLogLevel("DEBUG|INFO");
    xf.setParameter("USER", ".*");
    xf.setParameter("GROUP", ".*");
    xf.setParameter("TOKEN", ".*");
    xf.setParameter("APP", ".*");
    xf.setParameter("JOB", "0000003-140205233038063-oozie-oozi-C");
    xf.setParameter(DagXLogInfoService.ACTION, "0000003-140205233038063-oozie-oozi-C@1");
    String out = doStreamLog(xf);
    String[] outArr = out.split("\n");
    assertEquals(2, outArr.length);
    assertTrue(out.contains("_L1_"));
    assertFalse(out.contains("_L2_"));
    // We'll use a DummyZKOozie to create an entry in ZK and then set its
    // url to an (unrelated) servlet that will simply return
    // some log messages
    DummyZKOozie dummyOozie = null;
    EmbeddedServletContainer container = new EmbeddedServletContainer("oozie");
    container.addServletEndpoint("/other-oozie-server/*", DummyLogStreamingServlet.class);
    try {
        container.start();
        dummyOozie = new DummyZKOozie("9876", container.getServletURL("/other-oozie-server/*"));
        DummyLogStreamingServlet.logs = "";
        DummyLogStreamingServlet.lastQueryString = null;
        Map<String, String[]> param = new HashMap<String, String[]>();
        param.put(RestConstants.JOB_COORD_RANGE_TYPE_PARAM, new String[] { RestConstants.JOB_LOG_ACTION });
        param.put(RestConstants.JOB_COORD_SCOPE_PARAM, new String[] { "1" });
        out = doStreamLog(xf, param);
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("show=log&allservers=false"));
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("type=" + RestConstants.JOB_LOG_ACTION));
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains(RestConstants.JOB_COORD_SCOPE_PARAM + "=1"));
        param.clear();
        param.put(RestConstants.JOB_COORD_RANGE_TYPE_PARAM, new String[] { RestConstants.JOB_LOG_ACTION });
        param.put(RestConstants.JOB_COORD_SCOPE_PARAM, new String[] { "1-4,5" });
        out = doStreamLog(xf, param);
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("show=log&allservers=false"));
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("type=" + RestConstants.JOB_LOG_ACTION));
        assertTrue(DummyLogStreamingServlet.lastQueryString
                .contains(RestConstants.JOB_COORD_SCOPE_PARAM + "=1-4,5"));
        param.clear();
        Date endDate = new Date();
        Date createdDate = new Date(endDate.getTime() / 2);
        String date = DateUtils.formatDateOozieTZ(createdDate) + "::" + DateUtils.formatDateOozieTZ(endDate);
        param.put(RestConstants.JOB_COORD_RANGE_TYPE_PARAM, new String[] { RestConstants.JOB_LOG_DATE });
        param.put(RestConstants.JOB_COORD_SCOPE_PARAM, new String[] { date });
        out = doStreamLog(xf, param);
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("show=log&allservers=false"));
        assertTrue(DummyLogStreamingServlet.lastQueryString.contains("type=" + RestConstants.JOB_LOG_DATE));
        assertTrue(DummyLogStreamingServlet.lastQueryString
                .contains(RestConstants.JOB_COORD_SCOPE_PARAM + "=" + date));
        container.stop();
    } finally {
        if (dummyOozie != null) {
            dummyOozie.teardown();
        }
        container.stop();
    }
}
From source file:org.energy_home.jemma.javagal.layers.business.GalController.java
/**
 * recovery of the GAL
 */
public void recoveryGAL() throws Exception {
    MyRunnable thr = new MyRunnable(this) {
        @Override
        public void run() {
            String filenamelog = System.getProperty("user.home") + File.separator + "GalLog.log";
            BufferedWriter bufferFileWriter = null;
            try {
                LOG.error("\n\r********GAL node is not responding...Starting recovery procedue. Wait...");
                LOG.error("\n\r********STARTING RECOVERY...");
                /* Gal is not Responding */
                File f = new File(filenamelog);
                if (!f.exists())
                    try {
                        f.createNewFile();
                    } catch (IOException e2) {
                        LOG.error("\n\rError creating file log: " + filenamelog);
                    }
                FileWriter fileWriter = new FileWriter(f, true);
                bufferFileWriter = new BufferedWriter(fileWriter);
                fileWriter.append("\n\r" + new Date(System.currentTimeMillis()).toString() + "STARTING RECOVERY");
                /* Used for reset GAL */
                if (DataLayer != null) {
                    LOG.debug("Starting reset...");
                    /* Stop all timers */
                    synchronized (getNetworkcache()) {
                        for (WrapperWSNNode x : getNetworkcache()) {
                            x.abortTimers();
                        }
                    }
                    getNetworkcache().clear();
                    /* Stop discovery and freshness */
                    /* Destroy Gal Node */
                    set_GalNode(null);
                    setGatewayStatus(GatewayStatus.GW_READY_TO_START);
                    if (DataLayer.getIKeyInstance().isConnected())
                        DataLayer.getIKeyInstance().disconnect();
                    DataLayer.destroy();
                    LOG.debug("Reset done!");
                }
                /* End of reset section */
                if (PropertiesManager.getzgdDongleType().equalsIgnoreCase("freescale")) {
                    DataLayer = new DataFreescale((GalController) this.getParameter());
                    DataLayer.initialize();
                    try {
                        DataLayer.getIKeyInstance().initialize();
                    } catch (Exception e) {
                        DataLayer.getIKeyInstance().disconnect();
                        throw e;
                    }
                } else
                    try {
                        throw new Exception("No Platform found!");
                    } catch (Exception e) {
                        LOG.error("Caught No Platform found", e);
                    }
                if (DataLayer.getIKeyInstance().isConnected()) {
                    short _EndPoint = 0;
                    if (lastEndPoint == null) {
                        _EndPoint = configureEndpoint(PropertiesManager.getCommandTimeoutMS(),
                                PropertiesManager.getSimpleDescriptorReadFromFile());
                        if (_EndPoint == 0)
                            throw new Exception("Error on configure endpoint");
                    } else {
                        _EndPoint = configureEndpoint(PropertiesManager.getCommandTimeoutMS(), lastEndPoint);
                        if (_EndPoint == 0)
                            throw new Exception("Error on configure endpoint");
                    }
                    Status st = null;
                    if (lastSai != null) {
                        st = startGatewayDevice(PropertiesManager.getCommandTimeoutMS(), -1, lastSai, false);
                    } else {
                        st = startGatewayDevice(PropertiesManager.getCommandTimeoutMS(), -1,
                                PropertiesManager.getSturtupAttributeInfo(), false);
                    }
                    if (st.getCode() != GatewayConstants.SUCCESS)
                        throw new Exception("Error on starting gal" + st.getMessage());
                    else {
                        LOG.info("***Gateway is ready now... Current GAL Status: " + getGatewayStatus().toString() + "***");
                    }
                }
                LOG.error("********RECOVERY DONE!");
                fileWriter.append("\n\r" + new Date(System.currentTimeMillis()).toString() + "RECOVERY DONE!");
                return;
            } catch (Exception e1) {
                LOG.error("Error resetting GAL");
            } finally {
                try {
                    bufferFileWriter.close();
                } catch (IOException e) {
                    LOG.error("Error closing file: {}", filenamelog);
                }
            }
        }
    };
    new Thread(thr).start();
}
From source file:org.jumpmind.symmetric.util.SnapshotUtil.java
public static File createSnapshot(ISymmetricEngine engine) {
    String dirName = engine.getEngineName().replaceAll(" ", "-") + "-"
            + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
    IParameterService parameterService = engine.getParameterService();
    File tmpDir = new File(parameterService.getTempDirectory(), dirName);
    tmpDir.mkdirs();
    File logDir = null;
    String parameterizedLogDir = parameterService.getString("server.log.dir");
    if (isNotBlank(parameterizedLogDir)) {
        logDir = new File(parameterizedLogDir);
    }
    if (logDir != null && logDir.exists()) {
        log.info("Using server.log.dir setting as the location of the log files");
    } else {
        logDir = new File("logs");
        if (!logDir.exists()) {
            File file = findSymmetricLogFile();
            if (file != null) {
                logDir = file.getParentFile();
            }
        }
        if (!logDir.exists()) {
            logDir = new File("../logs");
        }
        if (!logDir.exists()) {
            logDir = new File("target");
        }
        if (logDir.exists()) {
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".log")) {
                        try {
                            FileUtils.copyFileToDirectory(file, tmpDir);
                        } catch (IOException e) {
                            log.warn("Failed to copy " + file.getName() + " to the snapshot directory", e);
                        }
                    }
                }
            }
        }
    }
    ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
    List<TriggerHistory> triggerHistories = triggerRouterService.getActiveTriggerHistories();
    TreeSet<Table> tables = new TreeSet<Table>();
    for (TriggerHistory triggerHistory : triggerHistories) {
        Table table = engine.getDatabasePlatform().getTableFromCache(triggerHistory.getSourceCatalogName(),
                triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
        if (table != null && !table.getName().toUpperCase()
                .startsWith(engine.getSymmetricDialect().getTablePrefix().toUpperCase())) {
            tables.add(table);
        }
    }
    List<Trigger> triggers = triggerRouterService.getTriggers(true);
    for (Trigger trigger : triggers) {
        Table table = engine.getDatabasePlatform().getTableFromCache(trigger.getSourceCatalogName(),
                trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
        if (table != null) {
            tables.add(table);
        }
    }
    FileWriter fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "config-export.csv"));
        engine.getDataExtractorService().extractConfigurationStandalone(engine.getNodeService().findIdentity(),
                fwriter, TableConstants.SYM_NODE, TableConstants.SYM_NODE_SECURITY,
                TableConstants.SYM_NODE_IDENTITY, TableConstants.SYM_NODE_HOST,
                TableConstants.SYM_NODE_CHANNEL_CTL, TableConstants.SYM_CONSOLE_USER);
    } catch (IOException e) {
        log.warn("Failed to export symmetric configuration", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "table-definitions.xml"));
        DbExport export = new DbExport(engine.getDatabasePlatform());
        export.setFormat(Format.XML);
        export.setNoData(true);
        export.exportTables(fos, tables.toArray(new Table[tables.size()]));
    } catch (IOException e) {
        log.warn("Failed to export table definitions", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    String tablePrefix = engine.getTablePrefix();
    DbExport export = new DbExport(engine.getDatabasePlatform());
    export.setFormat(Format.CSV);
    export.setNoCreateInfo(true);
    extract(export, new File(tmpDir, "identity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_IDENTITY));
    extract(export, new File(tmpDir, "node.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE));
    extract(export, new File(tmpDir, "nodesecurity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_SECURITY));
    extract(export, new File(tmpDir, "nodehost.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_HOST));
    extract(export, new File(tmpDir, "triggerhist.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_TRIGGER_HIST));
    extract(export, new File(tmpDir, "lock.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_LOCK));
    extract(export, new File(tmpDir, "nodecommunication.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_COMMUNICATION));
    extract(export, 5000, new File(tmpDir, "outgoingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));
    extract(export, 5000, new File(tmpDir, "incomingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));
    final int THREAD_INDENT_SPACE = 50;
    fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "threads.txt"));
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        long[] threadIds = threadBean.getAllThreadIds();
        for (long l : threadIds) {
            ThreadInfo info = threadBean.getThreadInfo(l, 100);
            if (info != null) {
                String threadName = info.getThreadName();
                fwriter.append(StringUtils.rightPad(threadName, THREAD_INDENT_SPACE));
                StackTraceElement[] trace = info.getStackTrace();
                boolean first = true;
                for (StackTraceElement stackTraceElement : trace) {
                    if (!first) {
                        fwriter.append(StringUtils.rightPad("", THREAD_INDENT_SPACE));
                    } else {
                        first = false;
                    }
                    fwriter.append(stackTraceElement.getClassName());
                    fwriter.append(".");
                    fwriter.append(stackTraceElement.getMethodName());
                    fwriter.append("()");
                    int lineNumber = stackTraceElement.getLineNumber();
                    if (lineNumber > 0) {
                        fwriter.append(": ");
                        fwriter.append(Integer.toString(stackTraceElement.getLineNumber()));
                    }
                    fwriter.append("\n");
                }
                fwriter.append("\n");
            }
        }
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters.properties"));
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        SortedProperties parameters = new SortedProperties();
        parameters.putAll(effectiveParameters);
        parameters.remove("db.password");
        parameters.store(fos, "parameters.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameter information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters-changed.properties"));
        Properties defaultParameters = new Properties();
        InputStream in = SnapshotUtil.class.getResourceAsStream("/symmetric-default.properties");
        defaultParameters.load(in);
        IOUtils.closeQuietly(in);
        in = SnapshotUtil.class.getResourceAsStream("/symmetric-console-default.properties");
        if (in != null) {
            defaultParameters.load(in);
            IOUtils.closeQuietly(in);
        }
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        Properties changedParameters = new SortedProperties();
        Map<String, ParameterMetaData> parameters = ParameterConstants.getParameterMetaData();
        for (String key : parameters.keySet()) {
            String defaultValue = defaultParameters.getProperty((String) key);
            String currentValue = effectiveParameters.getProperty((String) key);
            if (defaultValue == null && currentValue != null
                    || (defaultValue != null && !defaultValue.equals(currentValue))) {
                changedParameters.put(key, currentValue == null ? "" : currentValue);
            }
        }
        changedParameters.remove("db.password");
        changedParameters.store(fos, "parameters-changed.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameters-changed information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    writeRuntimeStats(engine, tmpDir);
    writeJobsStats(engine, tmpDir);
    if ("true".equals(System.getProperty(SystemConstants.SYSPROP_STANDALONE_WEB))) {
        writeDirectoryListing(engine, tmpDir);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "system.properties"));
        SortedProperties props = new SortedProperties();
        props.putAll(System.getProperties());
        props.store(fos, "system.properties");
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    try {
        File jarFile = new File(getSnapshotDirectory(engine), tmpDir.getName() + ".zip");
        JarBuilder builder = new JarBuilder(tmpDir, jarFile, new File[] { tmpDir }, Version.version());
        builder.build();
        FileUtils.deleteDirectory(tmpDir);
        return jarFile;
    } catch (IOException e) {
        throw new IoException("Failed to package snapshot files into archive", e);
    }
}