List of usage examples for the java.util.logging.FileHandler(String pattern) constructor.
public FileHandler(String pattern) throws IOException, SecurityException
From source file:org.noroomattheinn.utils.Utils.java
/**
 * Points {@code logger} at a fresh "-00" log file in {@code where} (after
 * rotating any existing logs) and aligns the root console handler(s) with the
 * requested level. Failures to create the file handler are reported through
 * the logger itself rather than propagated.
 *
 * @param where    directory that holds the rotating log files
 * @param basename log file base name; the active file is "basename-00.log"
 * @param logger   logger to configure
 * @param level    level applied to the logger, the file handler, and console handlers
 */
public static void setupLogger(File where, String basename, Logger logger, Level level) {
    // Shift existing logs down (keeping 3 generations) so "-00" is free.
    rotateLogs(where, basename, 3);
    try {
        logger.setLevel(level);
        FileHandler fh = new FileHandler(new File(where, basename + "-00.log").getAbsolutePath());
        fh.setFormatter(new SimpleFormatter());
        fh.setLevel(level);
        logger.addHandler(fh);
        // Bring the root logger's console output in line with the same level.
        for (Handler rootHandler : Logger.getLogger("").getHandlers()) {
            if (rootHandler instanceof ConsoleHandler) {
                rootHandler.setLevel(level);
            }
        }
    } catch (IOException | SecurityException ex) {
        logger.severe("Unable to establish log file: " + ex);
    }
}
From source file:BSxSB.Controllers.AdminController.java
@RequestMapping(value = "/deleteaccount", method = RequestMethod.POST) public String deleteAccount(Model model, @RequestParam(value = "email") String email) { try {/*from w w w . j a v a 2 s . com*/ //Initialize the file that the logger writes to. Handler handler = new FileHandler("%tBSxSBAdminStudentAccts.log"); logger.addHandler(handler); handler.setFormatter(new SimpleFormatter()); StudentDAO.deleteAccount(email); List<Students> allStudents = StudentDAO.getAcceptedAccounts(); model.addAttribute("allstudents", allStudents); logger.info("Successfully deleted: " + email); logger.info("Accounts successfully updated to model"); handler.close(); } catch (IOException ex) { logger.log(Level.SEVERE, null, ex); } catch (SecurityException ex) { logger.log(Level.SEVERE, null, ex); } return "adminmanageaccounts"; }
From source file:com.spinn3r.api.Main.java
/*
 * Command-line entry point for the Spinn3r reference client.
 *
 * Flow as visible below: (1) pre-scan args for --api (defaults to
 * "permalink"); (2) parse the remaining flags — several assign to fields
 * declared elsewhere in Main (filter, show_results, before, range, restore,
 * save, save_method, saveCompressed, timing, logLevel, debugLogFilePath,
 * dumpFields, dump, csv, logManager); an unknown flag prints the syntax help
 * and exits with status 1; (3) configure the anonymous logger, attaching a
 * FileHandler when --debug supplied a path; (4) when --save and --recover are
 * both set, rebuild restart state (last URL/counter) from the saved log files
 * and delete them; (5) select the Config/Client pair from the api name
 * (feed / comment / permalink); (6) enforce a minimum heap, comparing
 * Runtime.maxMemory() against PARSE_REQUIRED_MEMORY or SAVE_REQUIRED_MEMORY;
 * (7) default `after` to now minus INTERVAL when not given, then run
 * new Main().exec(client, config).
 *
 * NOTE: --afterTimestamp / --beforeTimestamp are deliberately tested before
 * --after / --before, because startsWith("--after") would otherwise also
 * match "--afterTimestamp" (same for before).
 * NOTE(review): the FileHandler attached for --debug is never closed; it
 * appears intended to live for the whole process run — confirm.
 * NOTE(review): the "propeties" typo and the commented-out --spam_probability
 * block are retained verbatim from the original listing.
 */
public static void main(String[] args) throws Exception { // NOTE this could be cleaned up to pass the values into the config // object directly. // parse out propeties. String api = null;/* w w w . ja va 2 s. c o m*/ for (int i = 0; i < args.length; ++i) { String v = args[i]; if (v.startsWith("--api")) { api = getOpt(v); } } if (api == null) api = "permalink"; // First. Determine which API you'd like to use. long after = -1; Format format = Format.PROTOSTREAM; String vendor = null; String remoteFilter = null; Long sleep_duration = null; boolean skip_description = false; String host = "api.spinn3r.com"; for (int i = 0; i < args.length; ++i) { String v = args[i]; if (v.startsWith("--vendor")) { vendor = getOpt(v); continue; } if (v.startsWith("--filter")) { filter = getOpt(v); continue; } if (v.startsWith("--remote-filter")) { remoteFilter = getOpt(v); continue; } if (v.startsWith("--show_results")) { show_results = Integer.parseInt(getOpt(v)); continue; } /* * The code for the --afterTimestamp must come * before the code for --after because --afterTimestamp * also matches startsWith("after"); */ if (v.startsWith("--afterTimestamp")) { after = Long.parseLong(getOpt(v)); continue; } if (v.startsWith("--after")) { after = getOptAsTimeInMillis(v); continue; } /* * The code for the --beforeTimestamp must come * before the code for --before because --beforeTimestamp * also matches startsWith("before"); */ if (v.startsWith("--beforeTimestamp")) { before = Long.parseLong(getOpt(v)); continue; } if (v.startsWith("--before")) { before = getOptAsTimeInMillis(v); continue; } if (v.startsWith("--range")) { range = Long.parseLong(getOpt(v)); continue; } if (v.startsWith("--recover")) { restore = true; continue; } if (v.startsWith("--sleep_duration")) { sleep_duration = Long.parseLong(getOpt(v)); continue; } if (v.startsWith("--save=")) { save = getOpt(v); continue; } if (v.startsWith("--save_method=")) { save_method = getOpt(v); continue; } if 
(v.startsWith("--skip_description=")) { skip_description = Boolean.parseBoolean(getOpt(v)); continue; } if (v.startsWith("--save_compressed=")) { saveCompressed = Boolean.parseBoolean(getOpt(v)); continue; } if (v.startsWith("--timing")) { timing = "true".equals(getOpt(v)); continue; } if (v.startsWith("--debug")) { logLevel = Level.FINE; debugLogFilePath = getOpt(v); continue; } /* * if ( v.startsWith( "--spam_probability" ) ) { * config.setSpamProbability( Double.parseDouble( getOpt( v ) ) ); * continue; } */ if (v.startsWith("--dump_fields=")) { dumpFields = Boolean.parseBoolean(getOpt(v)); continue; } if (v.startsWith("--dump=")) { dump = Boolean.parseBoolean(getOpt(v)); continue; } if (v.startsWith("--csv=")) { csv = Boolean.parseBoolean(getOpt(v)); continue; } if (v.startsWith("--memory")) { System.out.printf("max memory: %s\n", Runtime.getRuntime().maxMemory()); System.exit(0); } if (v.startsWith("--host")) { host = getOpt(v); continue; } if (v.startsWith("--enable3")) { // is now default continue; } if (v.startsWith("com.spinn3r")) continue; if (v.startsWith("--api")) continue; // That's an unknown command line option. Exit. 
System.err.printf("Unknown command line option: %s\n", v); syntax(); System.exit(1); } /* * Set the log level */ Logger anonymousLogger = Logger.getAnonymousLogger(); anonymousLogger.setLevel(logLevel); if (debugLogFilePath != null) { anonymousLogger.addHandler(new FileHandler(debugLogFilePath)); } Factory factory = new Factory(); String restoreURL = null; if (save != null && restore) { File savedir = new File(save); Collection<File> logFiles = getLogFiles(savedir); PermalinkLogReaderAdapter adapter = getRestoreURLS(logFiles); restoreURL = adapter.getLastUrl(); long ctr = adapter.getLastCtr(); for (File file : logFiles) { if (!file.delete()) throw new IOException("Failed to delete " + file.toString()); } factory.enableLogging(savedir, 1000000); if (restoreURL != null) { factory.enableRestart(ctr, restoreURL); } logManager = factory.getInjector().getInstance(TransactionHistoryManager.class); } else { logManager = factory.getInjector().getInstance(TransactionHistoryManager.class); } Config<? extends BaseResult> config = null; BaseClient<? extends BaseResult> client = null; if (api.startsWith("feed")) { config = new FeedConfig(); client = new FeedClient(); } else if (api.startsWith("comment")) { config = new CommentConfig(); client = new CommentClient(); } else { config = new PermalinkConfig(); client = new PermalinkClient( restoreURL != null ? ImmutableList.of(restoreURL) : Collections.<String>emptyList()); } config.setCommandLine(StringUtils.join(args, " ")); config.setApi(api); config.setFormat(format); config.setVendor(vendor); config.setHost(host); config.setFilter(remoteFilter); config.setSkipDescription(skip_description); if (sleep_duration != null) client.setSleepDuration(sleep_duration); // assert that we have all required options. if (config.getVendor() == null) { syntax(); System.exit(1); } long maxMemory = Runtime.getRuntime().maxMemory(); long requiredMemory = (save == null) ? 
PARSE_REQUIRED_MEMORY : SAVE_REQUIRED_MEMORY; if (maxMemory < requiredMemory) { System.out.printf("ERROR: Reference client requires at least 2GB of memory.\n"); System.out.printf("\n"); System.out.printf("Now running with: %s vs %s required\n", maxMemory, requiredMemory); System.out.printf("\n"); System.out.printf("Add -Xmx%dM to your command line and run again.\n", requiredMemory / (1024 * 1024)); System.exit(1); } // use defaults System.out.println("Using vendor: " + config.getVendor()); System.out.println("Using api: " + api); if (after > -1) System.out.printf("After: %s (%s)\n", ISO8601DateParser.toString(Config.timestampToDate(after)), after); if (before > -1) System.out.printf("Before: %s (%s)\n", ISO8601DateParser.toString(Config.timestampToDate(before)), before); System.out.println("Saving results to disk: " + save); // Fetch for the last 5 minutes and then try to get up to date. In // production you'd want to call setFirstRequestURL from the // getLastRequestURL returned from fetch() below if (after == -1) { after = Config.millisecondsToTimestamp(System.currentTimeMillis()); after = after - INTERVAL; } config.setAfterTimestamp(after); new Main().exec(client, config); }
From source file:edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java
/*
 * Batch-imports studies for dataverse `importDVId` from `importBatchDir`.
 * Expected on-disk layout (per the loop below): one subdirectory per study,
 * each containing a "study.xml" plus any number of data files to upload.
 *
 * Side effects: creates a timestamped batch log file under
 * FileUtil.getImportFileDir(), imports each study via studyService, uploads
 * the sibling files via studyFileService, indexes the imported studies, and
 * surfaces a status summary through addMessage(). Per-study import/upload
 * failures are counted and logged but do not abort the batch. The log
 * FileHandler is closed and detached from the logger in the finally block.
 * Returns null so JSF stays on the current page.
 *
 * NOTE(review): batchDir.listFiles() and studyDir.listFiles() are re-invoked
 * on every loop iteration — this rescans the directory each pass and would
 * NPE if listFiles() returned null; TODO confirm acceptable for this
 * admin-only path.
 */
public String importBatch_action() { FileHandler logFileHandler = null; Logger importLogger = null;//w w w . j a v a 2 s. c om if (importBatchDir == null || importBatchDir.equals("")) return null; try { int importFailureCount = 0; int fileFailureCount = 0; List<Long> studiesToIndex = new ArrayList<Long>(); //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId(); File batchDir = new File(importBatchDir); if (batchDir.exists() && batchDir.isDirectory()) { // create Logger String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date()); String dvAlias = vdcService.find(importDVId).getAlias(); importLogger = Logger.getLogger( "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp); String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_" + logTimestamp + ".log"; logFileHandler = new FileHandler(logFileName); importLogger.addHandler(logFileHandler); importLogger .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir); for (int i = 0; i < batchDir.listFiles().length; i++) { File studyDir = batchDir.listFiles()[i]; if (studyDir.isDirectory()) { // one directory per study importLogger.info("Found study directory: " + studyDir.getName()); File xmlFile = null; Map<File, String> filesToUpload = new HashMap(); for (int j = 0; j < studyDir.listFiles().length; j++) { File file = studyDir.listFiles()[j]; if ("study.xml".equals(file.getName())) { xmlFile = file; } else { addFile(file, "", filesToUpload); } } if (xmlFile != null) { try { importLogger.info("Found study.xml and " + filesToUpload.size() + " other " + (filesToUpload.size() == 1 ? "file." : "files.")); // TODO: we need to incorporate the add files step into the same transaction of the import!!! 
Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId, getVDCSessionBean().getLoginBean().getUser().getId()); study.getLatestVersion().setVersionNote("Study imported via batch import."); importLogger.info("Import of study.xml succeeded: study id = " + study.getId()); studiesToIndex.add(study.getId()); if (!filesToUpload.isEmpty()) { List<StudyFileEditBean> fileBeans = new ArrayList(); for (File file : filesToUpload.keySet()) { StudyFileEditBean fileBean = new StudyFileEditBean(file, studyService.generateFileSystemNameSequence(), study); fileBean.getFileMetadata().setCategory(filesToUpload.get(file)); fileBeans.add(fileBean); } try { studyFileService.addFiles(study.getLatestVersion(), fileBeans, getVDCSessionBean().getLoginBean().getUser()); importLogger.info("File upload succeeded."); } catch (Exception e) { fileFailureCount++; importLogger.severe("File Upload failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } } catch (Exception e) { importFailureCount++; importLogger.severe("Import failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } else { // no ddi.xml found in studyDir importLogger.warning("No study.xml file was found in study directory. Skipping... "); } } else { importLogger.warning("Found non directory at top level. Skipping... (filename = " + studyDir.getName() + ")"); } } // generate status message String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies") + " successfully imported"; statusMessage += (fileFailureCount == 0 ? "" : " (" + fileFailureCount + " of which failed file upload)"); statusMessage += (importFailureCount == 0 ? "." : "; " + importFailureCount + (importFailureCount == 1 ? 
" study" : " studies") + " failed import."); importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage); // now index all studies importLogger.info("POST BATCH IMPORT, start calls to index."); indexService.updateIndexList(studiesToIndex); importLogger.info("POST BATCH IMPORT, calls to index finished."); addMessage("importMessage", "Batch Import request completed."); addMessage("importMessage", statusMessage); addMessage("importMessage", "For more detail see log file at: " + logFileName); } else { addMessage("importMessage", "Batch Import failed: " + importBatchDir + " does not exist or is not a directory."); } } catch (Exception e) { e.printStackTrace(); addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing."); addMessage("importMessage", "Exception message: " + e.getMessage()); } finally { if (logFileHandler != null) { logFileHandler.close(); importLogger.removeHandler(logFileHandler); } // importBatchDir = ""; } return null; }
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
/*
 * Exports studies under `authority` updated since `lastUpdateTime` (defaulting
 * to yesterday when null) to the legacy VDC system, writing progress to a
 * per-run log file "export_<yyyy_MM_dd>.log" in the directory named by the
 * vdc.export.log.dir system property. Returns early (with a console message)
 * if that property is unset; creates the directory if missing.
 *
 * The shared logger's previous handlers are removed up front so each run gets
 * its own log file. A FileHandler creation failure is wrapped in EJBException;
 * failures inside the export loop are caught and logged with a hand-built
 * stack trace rather than rethrown. `deletedStudyCount` is only ever
 * incremented by the legacy code retained in the comment block below.
 *
 * NOTE(review): the JPQL query is assembled by string concatenation of
 * `authority` and the computed dates — injection-prone if `authority` can be
 * user-controlled; confirm callers pass trusted values only.
 * NOTE(review): the new FileHandler is never closed here; cleanup appears to
 * rely on the handler-removal pass at the start of the next run — confirm.
 */
public void exportStudyFilesToLegacySystem(String lastUpdateTime, String authority) { // Get list of studies that have been updated yesterday, // and export them to legacy VDC system Logger logger = null;//from w w w. j a v a 2 s . c o m String exportLogDirStr = System.getProperty("vdc.export.log.dir"); if (exportLogDirStr == null) { System.out.println("Missing system property: vdc.export.log.dir. Please add to JVM options"); return; } File exportLogDir = new File(exportLogDirStr); if (!exportLogDir.exists()) { exportLogDir.mkdir(); } logger = Logger.getLogger("edu.harvard.iq.dvn.core.web.servlet.VDCExportServlet"); // Everytime export runs, we want to write to a separate log file (handler). // So if export has run previously, remove the previous handler if (logger.getHandlers() != null && logger.getHandlers().length > 0) { int numHandlers = logger.getHandlers().length; for (int i = 0; i < numHandlers; i++) { logger.removeHandler(logger.getHandlers()[i]); } } SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd"); FileHandler handler = null; try { handler = new FileHandler( exportLogDirStr + File.separator + "export_" + formatter.format(new Date()) + ".log"); } catch (IOException e) { throw new EJBException(e); } // Add handler to the desired logger logger.addHandler(handler); logger.info("Begin Exporting Studies"); int studyCount = 0; int deletedStudyCount = 0; try { /* THIS IS LEGACY CODE AND SHOULD BE DELETED // For all studies that have been deleted in the dataverse since last export, remove study directory in VDC String query = "SELECT s from DeletedStudy s where s.authority = '" + authority + "' "; List deletedStudies = em.createQuery(query).getResultList(); for (Iterator it = deletedStudies.iterator(); it.hasNext();) { DeletedStudy deletedStudy = (DeletedStudy) it.next(); logger.info("Deleting study " + deletedStudy.getGlobalId()); Study study = em.find(Study.class, deletedStudy.getId()); File legacyStudyDir = new File(FileUtil.getLegacyFileDir() + 
File.separatorChar + study.getAuthority() + File.separatorChar + study.getStudyId()); // Remove files in the directory, then delete the directory. File[] studyFiles = legacyStudyDir.listFiles(); if (studyFiles != null) { for (int i = 0; i < studyFiles.length; i++) { studyFiles[i].delete(); } } legacyStudyDir.delete(); deletedStudyCount++; em.remove(deletedStudy); } */ // Do export of all studies updated at "lastUpdateTime"" if (authority == null) { authority = vdcNetworkService.find().getAuthority(); } String beginTime = null; String endTime = null; if (lastUpdateTime == null) { Calendar cal = Calendar.getInstance(); cal.add(Calendar.DAY_OF_YEAR, -1); beginTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); // Use yesterday as default value cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } else { beginTime = lastUpdateTime; Date date = new SimpleDateFormat("yyyy-MM-dd").parse(lastUpdateTime); Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } String query = "SELECT s from Study s where s.authority = '" + authority + "' "; query += " and s.lastUpdateTime >'" + beginTime + "'"; // query+=" and s.lastUpdateTime <'" +endTime+"'"; query += " order by s.studyId"; List updatedStudies = em.createQuery(query).getResultList(); for (Iterator it = updatedStudies.iterator(); it.hasNext();) { Study study = (Study) it.next(); logger.info("Exporting study " + study.getStudyId()); exportStudyToLegacySystem(study, authority); studyCount++; } } catch (Exception e) { logger.severe(e.getMessage()); String stackTrace = "StackTrace: \n"; logger.severe("Exception caused by: " + e + "\n"); StackTraceElement[] ste = e.getStackTrace(); for (int m = 0; m < ste.length; m++) { stackTrace += ste[m].toString() + "\n"; } logger.severe(stackTrace); } logger.info("End export, " + studyCount + " studies successfully exported, 
" + deletedStudyCount + " studies deleted.");
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
public void exportStudies(List<Long> studyIds, String exportFormat) { String logTimestamp = exportLogFormatter.format(new Date()); Logger exportLogger = Logger .getLogger("edu.harvard.iq.dvn.core.study.StudyServiceBean.export." + logTimestamp); List<Long> harvestedStudyIds = new ArrayList<Long>(); try {/*from w w w . jav a 2s . com*/ exportLogger.addHandler(new FileHandler( FileUtil.getExportFileDir() + File.separator + "export_" + logTimestamp + ".log")); } catch (IOException e) { logger.severe("Exception adding log file handler " + FileUtil.getExportFileDir() + File.separator + "export_" + logTimestamp + ".log"); return; } try { int exportCount = 0; exportLogger.info("Begin exporting studies, number of possible studies to export: " + studyIds.size()); for (Long studyId : studyIds) { Study study = em.find(Study.class, studyId); if (study.getReleasedVersion() != null) { exportLogger.info("Begin export for study " + study.getGlobalId()); if (exportFormat == null) { studyService.exportStudy(studyId); //TODO check why do we pass the id and not the study } else { studyService.exportStudyToFormat(studyId, exportFormat); //TODO check why do we pass the id and not the study } exportLogger.info("Complete export for study " + study.getGlobalId()); exportCount++; } else { exportLogger .info("No released version for study " + study.getGlobalId() + "; skipping export."); } } exportLogger.info("Completed exporting studies. Actual number of studies exported: " + exportCount); } catch (EJBException e) { logException(e, exportLogger); throw e; } }
From source file:edu.harvard.iq.dvn.core.web.admin.OptionsPage.java
/*
 * Batch-imports studies for dataverse `importDVId` from `importBatchDir`
 * (OptionsPage variant; it sets a fixed sessionId of "batchimportsession" and
 * passes the import logger into addFile(), but otherwise mirrors the
 * UtilitiesPage version in this listing). Expected on-disk layout: one
 * subdirectory per study, each containing a "study.xml" plus any number of
 * data files to upload.
 *
 * Side effects: creates a timestamped batch log file under
 * FileUtil.getImportFileDir(), imports each study via studyService, uploads
 * the sibling files via studyFileService, indexes the imported studies, and
 * surfaces a status summary through addMessage(). Per-study import/upload
 * failures are counted and logged but do not abort the batch. The log
 * FileHandler is closed and detached from the logger in the finally block.
 * Returns null so JSF stays on the current page.
 *
 * NOTE(review): batchDir.listFiles() and studyDir.listFiles() are re-invoked
 * on every loop iteration — this rescans the directory each pass and would
 * NPE if listFiles() returned null; TODO confirm acceptable.
 */
public String importBatch_action() { FileHandler logFileHandler = null; Logger importLogger = null;/* w w w. java 2 s . c o m*/ if (importBatchDir == null || importBatchDir.equals("")) return null; try { int importFailureCount = 0; int fileFailureCount = 0; List<Long> studiesToIndex = new ArrayList<Long>(); //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId(); sessionId = "batchimportsession"; File batchDir = new File(importBatchDir); if (batchDir.exists() && batchDir.isDirectory()) { // create Logger String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date()); String dvAlias = vdcService.find(importDVId).getAlias(); importLogger = Logger.getLogger( "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp); String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_" + logTimestamp + ".log"; logFileHandler = new FileHandler(logFileName); importLogger.addHandler(logFileHandler); importLogger .info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir); for (int i = 0; i < batchDir.listFiles().length; i++) { File studyDir = batchDir.listFiles()[i]; if (studyDir.isDirectory()) { // one directory per study importLogger.info("Found study directory: " + studyDir.getName()); File xmlFile = null; Map<File, String> filesToUpload = new HashMap(); for (int j = 0; j < studyDir.listFiles().length; j++) { File file = studyDir.listFiles()[j]; if ("study.xml".equals(file.getName())) { xmlFile = file; } else { addFile(file, "", filesToUpload, importLogger); } } if (xmlFile != null) { try { importLogger.info("Found study.xml and " + filesToUpload.size() + " other " + (filesToUpload.size() == 1 ? "file." : "files.")); // TODO: we need to incorporate the add files step into the same transaction of the import!!! 
Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId, getVDCSessionBean().getLoginBean().getUser().getId()); study.getLatestVersion().setVersionNote("Study imported via batch import."); importLogger.info("Import of study.xml succeeded: study id = " + study.getId()); studiesToIndex.add(study.getId()); if (!filesToUpload.isEmpty()) { List<StudyFileEditBean> fileBeans = new ArrayList(); for (File file : filesToUpload.keySet()) { StudyFileEditBean fileBean = new StudyFileEditBean(file, studyService.generateFileSystemNameSequence(), study); fileBean.getFileMetadata().setCategory(filesToUpload.get(file)); fileBeans.add(fileBean); } try { studyFileService.addFiles(study.getLatestVersion(), fileBeans, getVDCSessionBean().getLoginBean().getUser()); importLogger.info("File upload succeeded."); } catch (Exception e) { fileFailureCount++; importLogger.severe("File Upload failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } } catch (Exception e) { importFailureCount++; importLogger.severe("Import failed (dir = " + studyDir.getName() + "): exception message = " + e.getMessage()); logException(e, importLogger); } } else { // no ddi.xml found in studyDir importLogger.warning("No study.xml file was found in study directory. Skipping... "); } } else { importLogger.warning("Found non directory at top level. Skipping... (filename = " + studyDir.getName() + ")"); } } // generate status message String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies") + " successfully imported"; statusMessage += (fileFailureCount == 0 ? "" : " (" + fileFailureCount + " of which failed file upload)"); statusMessage += (importFailureCount == 0 ? "." : "; " + importFailureCount + (importFailureCount == 1 ? 
" study" : " studies") + " failed import."); importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage); // now index all studies importLogger.info("POST BATCH IMPORT, start calls to index."); indexService.updateIndexList(studiesToIndex); importLogger.info("POST BATCH IMPORT, calls to index finished."); addMessage("importMessage", "Batch Import request completed."); addMessage("importMessage", statusMessage); addMessage("importMessage", "For more detail see log file at: " + logFileName); } else { addMessage("importMessage", "Batch Import failed: " + importBatchDir + " does not exist or is not a directory."); } } catch (Exception e) { e.printStackTrace(); addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing."); addMessage("importMessage", "Exception message: " + e.getMessage()); } finally { if (logFileHandler != null) { logFileHandler.close(); importLogger.removeHandler(logFileHandler); } // importBatchDir = ""; } return null; }