List of usage examples for java.util.logging Logger addHandler
public void addHandler(Handler handler) throws SecurityException
From source file: edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java
public String importBatch_action() { FileHandler logFileHandler = null; Logger importLogger = null; if (importBatchDir == null || importBatchDir.equals("")) return null; try {/*from ww w. j av a2s.c om*/ int importFailureCount = 0; int fileFailureCount = 0; List<Long> studiesToIndex = new ArrayList<Long>(); //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId(); File batchDir = new File(importBatchDir); if (batchDir.exists() && batchDir.isDirectory()) { // create Logger String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date()); String dvAlias = vdcService.find(importDVId).getAlias(); importLogger = Logger.getLogger( "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp); String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_" + logTimestamp + ".log"; logFileHandler = new FileHandler(logFileName); importLogger.addHandler(logFileHandler); importLogger .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir); for (int i = 0; i < batchDir.listFiles().length; i++) { File studyDir = batchDir.listFiles()[i]; if (studyDir.isDirectory()) { // one directory per study importLogger.info("Found study directory: " + studyDir.getName()); File xmlFile = null; Map<File, String> filesToUpload = new HashMap(); for (int j = 0; j < studyDir.listFiles().length; j++) { File file = studyDir.listFiles()[j]; if ("study.xml".equals(file.getName())) { xmlFile = file; } else { addFile(file, "", filesToUpload); } } if (xmlFile != null) { try { importLogger.info("Found study.xml and " + filesToUpload.size() + " other " + (filesToUpload.size() == 1 ? "file." : "files.")); // TODO: we need to incorporate the add files step into the same transaction of the import!!! 
Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId, getVDCSessionBean().getLoginBean().getUser().getId()); study.getLatestVersion().setVersionNote("Study imported via batch import."); importLogger.info("Import of study.xml succeeded: study id = " + study.getId()); studiesToIndex.add(study.getId()); if (!filesToUpload.isEmpty()) { List<StudyFileEditBean> fileBeans = new ArrayList(); for (File file : filesToUpload.keySet()) { StudyFileEditBean fileBean = new StudyFileEditBean(file, studyService.generateFileSystemNameSequence(), study); fileBean.getFileMetadata().setCategory(filesToUpload.get(file)); fileBeans.add(fileBean); } try { studyFileService.addFiles(study.getLatestVersion(), fileBeans, getVDCSessionBean().getLoginBean().getUser()); importLogger.info("File upload succeeded."); } catch (Exception e) { fileFailureCount++; importLogger.severe("File Upload failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } } catch (Exception e) { importFailureCount++; importLogger.severe("Import failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } else { // no ddi.xml found in studyDir importLogger.warning("No study.xml file was found in study directory. Skipping... "); } } else { importLogger.warning("Found non directory at top level. Skipping... (filename = " + studyDir.getName() + ")"); } } // generate status message String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies") + " successfully imported"; statusMessage += (fileFailureCount == 0 ? "" : " (" + fileFailureCount + " of which failed file upload)"); statusMessage += (importFailureCount == 0 ? "." : "; " + importFailureCount + (importFailureCount == 1 ? 
" study" : " studies") + " failed import."); importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage); // now index all studies importLogger.info("POST BATCH IMPORT, start calls to index."); indexService.updateIndexList(studiesToIndex); importLogger.info("POST BATCH IMPORT, calls to index finished."); addMessage("importMessage", "Batch Import request completed."); addMessage("importMessage", statusMessage); addMessage("importMessage", "For more detail see log file at: " + logFileName); } else { addMessage("importMessage", "Batch Import failed: " + importBatchDir + " does not exist or is not a directory."); } } catch (Exception e) { e.printStackTrace(); addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing."); addMessage("importMessage", "Exception message: " + e.getMessage()); } finally { if (logFileHandler != null) { logFileHandler.close(); importLogger.removeHandler(logFileHandler); } // importBatchDir = ""; } return null; }
From source file: edu.harvard.iq.dvn.core.study.StudyServiceBean.java
public void exportStudies(List<Long> studyIds, String exportFormat) { String logTimestamp = exportLogFormatter.format(new Date()); Logger exportLogger = Logger .getLogger("edu.harvard.iq.dvn.core.study.StudyServiceBean.export." + logTimestamp); List<Long> harvestedStudyIds = new ArrayList<Long>(); try {// w ww.j a v a2 s . c o m exportLogger.addHandler(new FileHandler( FileUtil.getExportFileDir() + File.separator + "export_" + logTimestamp + ".log")); } catch (IOException e) { logger.severe("Exception adding log file handler " + FileUtil.getExportFileDir() + File.separator + "export_" + logTimestamp + ".log"); return; } try { int exportCount = 0; exportLogger.info("Begin exporting studies, number of possible studies to export: " + studyIds.size()); for (Long studyId : studyIds) { Study study = em.find(Study.class, studyId); if (study.getReleasedVersion() != null) { exportLogger.info("Begin export for study " + study.getGlobalId()); if (exportFormat == null) { studyService.exportStudy(studyId); //TODO check why do we pass the id and not the study } else { studyService.exportStudyToFormat(studyId, exportFormat); //TODO check why do we pass the id and not the study } exportLogger.info("Complete export for study " + study.getGlobalId()); exportCount++; } else { exportLogger .info("No released version for study " + study.getGlobalId() + "; skipping export."); } } exportLogger.info("Completed exporting studies. Actual number of studies exported: " + exportCount); } catch (EJBException e) { logException(e, exportLogger); throw e; } }
From source file: de.interactive_instruments.ShapeChange.Target.FeatureCatalogue.FeatureCatalogue.java
private void fopWrite(String xmlName, String xslfofileName, String outfileName, String outputMimetype) { Properties outputFormat = OutputPropertiesFactory.getDefaultMethodProperties("xml"); outputFormat.setProperty("indent", "yes"); outputFormat.setProperty("{http://xml.apache.org/xalan}indent-amount", "2"); outputFormat.setProperty("encoding", encoding); // redirect FOP-logging to our system, Level 'Warning' by default Logger fl = Logger.getLogger("org.apache.fop"); fl.setLevel(Level.WARNING);//from w ww .ja v a 2 s . c o m FopMsgHandler fmh = new FopMsgHandler(result, this); fl.addHandler(fmh); try { // configure fopFactory as desired FopFactory fopFactory = FopFactory.newInstance(); FOUserAgent foUserAgent = fopFactory.newFOUserAgent(); // configure foUserAgent as desired boolean skip = false; // Setup directories File outDir = new File(outputDirectory); // Setup input and output files File xmlFile = new File(outDir, xmlName); File xsltFile = new File(xsltPath, xslfofileName); File outFile = new File(outDir, outfileName); if (!xmlFile.canRead()) { result.addError(null, 301, xmlFile.getName(), outfileName); skip = true; } if (!xsltFile.canRead()) { result.addError(null, 301, xsltFile.getName(), outfileName); skip = true; } if (skip == false) { // Setup output OutputStream out = null; try { out = new java.io.FileOutputStream(outFile); out = new java.io.BufferedOutputStream(out); } catch (Exception e) { result.addError(null, 304, outFile.getName(), e.getMessage()); skip = true; } if (skip == false) { try { // Construct fop with desired output format Fop fop = fopFactory.newFop(MimeConstants.MIME_PDF, foUserAgent, out); // Setup XSLT if (xslTransformerFactory != null) { // use TransformerFactory specified in configuration System.setProperty("javax.xml.transform.TransformerFactory", xslTransformerFactory); } else { // use TransformerFactory determined by system } TransformerFactory factory = TransformerFactory.newInstance(); Transformer transformer = 
factory.newTransformer(new StreamSource(xsltFile)); FopErrorListener el = new FopErrorListener(xmlFile.getName(), result, this); transformer.setErrorListener(el); // Set the value of a <param> in the stylesheet transformer.setParameter("versionParam", "2.0"); // Setup input for XSLT transformation Source src = new StreamSource(xmlFile); // Resulting SAX events (the generated FO) must be piped // through to FOP Result res = new SAXResult(fop.getDefaultHandler()); // Start XSLT transformation and FOP processing transformer.transform(src, res); } catch (Exception e) { result.addError(null, 304, outfileName, e.getMessage()); skip = true; } finally { out.close(); result.addResult(getTargetID(), outputDirectory, outfileName, null); if (deleteXmlFile) xmlFile.delete(); } } } } catch (Exception e) { String m = e.getMessage(); if (m != null) { result.addError(m); } e.printStackTrace(System.err); } }
From source file: edu.harvard.iq.dvn.core.study.StudyServiceBean.java
public void exportStudyFilesToLegacySystem(String lastUpdateTime, String authority) { // Get list of studies that have been updated yesterday, // and export them to legacy VDC system Logger logger = null; String exportLogDirStr = System.getProperty("vdc.export.log.dir"); if (exportLogDirStr == null) { System.out.println("Missing system property: vdc.export.log.dir. Please add to JVM options"); return;// w ww . ja v a 2 s.c o m } File exportLogDir = new File(exportLogDirStr); if (!exportLogDir.exists()) { exportLogDir.mkdir(); } logger = Logger.getLogger("edu.harvard.iq.dvn.core.web.servlet.VDCExportServlet"); // Everytime export runs, we want to write to a separate log file (handler). // So if export has run previously, remove the previous handler if (logger.getHandlers() != null && logger.getHandlers().length > 0) { int numHandlers = logger.getHandlers().length; for (int i = 0; i < numHandlers; i++) { logger.removeHandler(logger.getHandlers()[i]); } } SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd"); FileHandler handler = null; try { handler = new FileHandler( exportLogDirStr + File.separator + "export_" + formatter.format(new Date()) + ".log"); } catch (IOException e) { throw new EJBException(e); } // Add handler to the desired logger logger.addHandler(handler); logger.info("Begin Exporting Studies"); int studyCount = 0; int deletedStudyCount = 0; try { /* THIS IS LEGACY CODE AND SHOULD BE DELETED // For all studies that have been deleted in the dataverse since last export, remove study directory in VDC String query = "SELECT s from DeletedStudy s where s.authority = '" + authority + "' "; List deletedStudies = em.createQuery(query).getResultList(); for (Iterator it = deletedStudies.iterator(); it.hasNext();) { DeletedStudy deletedStudy = (DeletedStudy) it.next(); logger.info("Deleting study " + deletedStudy.getGlobalId()); Study study = em.find(Study.class, deletedStudy.getId()); File legacyStudyDir = new File(FileUtil.getLegacyFileDir() + 
File.separatorChar + study.getAuthority() + File.separatorChar + study.getStudyId()); // Remove files in the directory, then delete the directory. File[] studyFiles = legacyStudyDir.listFiles(); if (studyFiles != null) { for (int i = 0; i < studyFiles.length; i++) { studyFiles[i].delete(); } } legacyStudyDir.delete(); deletedStudyCount++; em.remove(deletedStudy); } */ // Do export of all studies updated at "lastUpdateTime"" if (authority == null) { authority = vdcNetworkService.find().getAuthority(); } String beginTime = null; String endTime = null; if (lastUpdateTime == null) { Calendar cal = Calendar.getInstance(); cal.add(Calendar.DAY_OF_YEAR, -1); beginTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); // Use yesterday as default value cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } else { beginTime = lastUpdateTime; Date date = new SimpleDateFormat("yyyy-MM-dd").parse(lastUpdateTime); Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } String query = "SELECT s from Study s where s.authority = '" + authority + "' "; query += " and s.lastUpdateTime >'" + beginTime + "'"; // query+=" and s.lastUpdateTime <'" +endTime+"'"; query += " order by s.studyId"; List updatedStudies = em.createQuery(query).getResultList(); for (Iterator it = updatedStudies.iterator(); it.hasNext();) { Study study = (Study) it.next(); logger.info("Exporting study " + study.getStudyId()); exportStudyToLegacySystem(study, authority); studyCount++; } } catch (Exception e) { logger.severe(e.getMessage()); String stackTrace = "StackTrace: \n"; logger.severe("Exception caused by: " + e + "\n"); StackTraceElement[] ste = e.getStackTrace(); for (int m = 0; m < ste.length; m++) { stackTrace += ste[m].toString() + "\n"; } logger.severe(stackTrace); } logger.info("End export, " + studyCount + " studies successfully exported, 
" + deletedStudyCount + " studies deleted."); }
From source file: edu.harvard.iq.dvn.core.web.admin.OptionsPage.java
public String importBatch_action() { FileHandler logFileHandler = null; Logger importLogger = null; if (importBatchDir == null || importBatchDir.equals("")) return null; try {//w w w .j a v a 2s . c o m int importFailureCount = 0; int fileFailureCount = 0; List<Long> studiesToIndex = new ArrayList<Long>(); //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId(); sessionId = "batchimportsession"; File batchDir = new File(importBatchDir); if (batchDir.exists() && batchDir.isDirectory()) { // create Logger String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date()); String dvAlias = vdcService.find(importDVId).getAlias(); importLogger = Logger.getLogger( "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp); String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_" + logTimestamp + ".log"; logFileHandler = new FileHandler(logFileName); importLogger.addHandler(logFileHandler); importLogger .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir); for (int i = 0; i < batchDir.listFiles().length; i++) { File studyDir = batchDir.listFiles()[i]; if (studyDir.isDirectory()) { // one directory per study importLogger.info("Found study directory: " + studyDir.getName()); File xmlFile = null; Map<File, String> filesToUpload = new HashMap(); for (int j = 0; j < studyDir.listFiles().length; j++) { File file = studyDir.listFiles()[j]; if ("study.xml".equals(file.getName())) { xmlFile = file; } else { addFile(file, "", filesToUpload, importLogger); } } if (xmlFile != null) { try { importLogger.info("Found study.xml and " + filesToUpload.size() + " other " + (filesToUpload.size() == 1 ? "file." : "files.")); // TODO: we need to incorporate the add files step into the same transaction of the import!!! 
Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId, getVDCSessionBean().getLoginBean().getUser().getId()); study.getLatestVersion().setVersionNote("Study imported via batch import."); importLogger.info("Import of study.xml succeeded: study id = " + study.getId()); studiesToIndex.add(study.getId()); if (!filesToUpload.isEmpty()) { List<StudyFileEditBean> fileBeans = new ArrayList(); for (File file : filesToUpload.keySet()) { StudyFileEditBean fileBean = new StudyFileEditBean(file, studyService.generateFileSystemNameSequence(), study); fileBean.getFileMetadata().setCategory(filesToUpload.get(file)); fileBeans.add(fileBean); } try { studyFileService.addFiles(study.getLatestVersion(), fileBeans, getVDCSessionBean().getLoginBean().getUser()); importLogger.info("File upload succeeded."); } catch (Exception e) { fileFailureCount++; importLogger.severe("File Upload failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } } catch (Exception e) { importFailureCount++; importLogger.severe("Import failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } else { // no ddi.xml found in studyDir importLogger.warning("No study.xml file was found in study directory. Skipping... "); } } else { importLogger.warning("Found non directory at top level. Skipping... (filename = " + studyDir.getName() + ")"); } } // generate status message String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies") + " successfully imported"; statusMessage += (fileFailureCount == 0 ? "" : " (" + fileFailureCount + " of which failed file upload)"); statusMessage += (importFailureCount == 0 ? "." : "; " + importFailureCount + (importFailureCount == 1 ? 
" study" : " studies") + " failed import."); importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage); // now index all studies importLogger.info("POST BATCH IMPORT, start calls to index."); indexService.updateIndexList(studiesToIndex); importLogger.info("POST BATCH IMPORT, calls to index finished."); addMessage("importMessage", "Batch Import request completed."); addMessage("importMessage", statusMessage); addMessage("importMessage", "For more detail see log file at: " + logFileName); } else { addMessage("importMessage", "Batch Import failed: " + importBatchDir + " does not exist or is not a directory."); } } catch (Exception e) { e.printStackTrace(); addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing."); addMessage("importMessage", "Exception message: " + e.getMessage()); } finally { if (logFileHandler != null) { logFileHandler.close(); importLogger.removeHandler(logFileHandler); } // importBatchDir = ""; } return null; }