List of usage examples for the java.util.logging.FileHandler constructor
public FileHandler(String pattern) throws IOException, SecurityException
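Before the real-world examples below, here is a minimal sketch of the pattern they all share: construct a FileHandler for a log file, attach it to a logger, and close it when done. The class name, logger name, and "example.log" file name are placeholders, not taken from any of the source files.

import java.io.IOException;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class FileHandlerExample {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("example");
        try {
            // The constructor may throw IOException (file cannot be opened)
            // or SecurityException (no LoggingPermission("control")).
            FileHandler handler = new FileHandler("example.log");
            handler.setFormatter(new SimpleFormatter());
            logger.addHandler(handler);
            logger.info("FileHandler attached");
            handler.close();
            logger.removeHandler(handler);
        } catch (IOException | SecurityException ex) {
            logger.log(Level.SEVERE, "Could not create FileHandler", ex);
        }
    }
}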
From source file:com.cisco.oss.foundation.logging.FoundationLogger.java
private static void setupJULSupport(URL resource) {
    boolean julSupportEnabled = Boolean.valueOf(log4jConfigProps
            .getProperty(FoundationLoggerConstants.Foundation_JUL_SUPPORT_ENABLED.toString(), "false"));
    if (julSupportEnabled) {
        String appenderRef = log4jConfigProps
                .getProperty(FoundationLoggerConstants.Foundation_JUL_APPENDER_REF.toString());

        if (StringUtils.isBlank(appenderRef)) {
            Enumeration allAppenders = Logger.getRootLogger().getAllAppenders();
            while (allAppenders.hasMoreElements()) {
                Appender appender = (Appender) allAppenders.nextElement();
                if (appender instanceof FileAppender) {
                    appenderRef = appender.getName();
                    getLogger(FoundationLogger.class)
                            .info("*** Using '" + appenderRef + "' as the Java util logging appender ref ***");
                    System.err.println(
                            "*** Using '" + appenderRef + "' as the Java util logging appender ref ***");
                    break;
                }
            }
        }

        if (StringUtils.isBlank(appenderRef)) {
            throw new IllegalArgumentException(
                    "Java util support was enabled but couldn't find a matching appender under the '"
                            + FoundationLoggerConstants.Foundation_JUL_APPENDER_REF.toString() + "' key.");
        }

        Handler handler = null;
        Appender appender = Logger.getRootLogger().getAppender(appenderRef);

        if (appender == null) {
            Enumeration allAppenders = Logger.getRootLogger().getAllAppenders();
            while (allAppenders.hasMoreElements()) {
                Appender tempAppender = (Appender) allAppenders.nextElement();
                if (tempAppender instanceof AsyncAppender) {
                    AsyncAppender asyncAppender = (AsyncAppender) tempAppender;
                    Enumeration asyncAppenderAllAppenders = asyncAppender.getAllAppenders();
                    while (asyncAppenderAllAppenders.hasMoreElements()) {
                        Appender asyncTempAppender = (Appender) asyncAppenderAllAppenders.nextElement();
                        if (appenderRef.equals(asyncTempAppender.getName())) {
                            appender = asyncTempAppender;
                            break;
                        }
                    }
                    if (appender != null) {
                        break;
                    }
                }
            }
        }

        if (appender instanceof FileAppender) {
            try {
                handler = new FileHandler(((FileAppender) appender).getFile());
            } catch (IOException e) {
                throw new IllegalArgumentException(
                        "IOException encountered when trying to setup jul logging: " + e, e);
            }
        } else if (appender instanceof ConsoleAppender) {
            handler = new ConsoleHandler();
        } else {
            getLogger(FoundationLogger.class)
                    .error("got a reference to an unsupported appender: " + appenderRef);
        }

        if (handler != null) {
            // System.setProperty("java.util.logging.config.file",resource.getPath());
            java.util.logging.LogManager.getLogManager().reset();
            try {
                java.util.logging.LogManager.getLogManager().readConfiguration(resource.openStream());
            } catch (IOException e) {
                throw new IllegalArgumentException(
                        "IOException encountered when trying to read log4j properties file: " + e, e);
            }
            handler.setLevel(java.util.logging.Level.FINEST);
            handler.setFormatter(new FoundationLogFormatter());

            java.util.logging.Logger rootLogger = java.util.logging.Logger.getLogger("");
            rootLogger.addHandler(handler);
            rootLogger.setLevel(java.util.logging.Level.SEVERE);

            Properties julLoggerSubset = getPropertiesSubset("jul.logger");
            if (!julLoggerSubset.isEmpty()) {
                Set<Object> keySet = julLoggerSubset.keySet();
                for (Object key : keySet) {
                    java.util.logging.Logger logger = java.util.logging.Logger.getLogger((String) key);
                    logger.setLevel(java.util.logging.Level.parse((String) julLoggerSubset.get(key)));
                }
            }
        }
    }
}
From source file:org.trec.liveqa.TrecLiveQaDemoServer.java
public static void main(String[] args) throws IOException {
    TrecLiveQaDemoServer server = new TrecLiveQaDemoServer(
            args.length == 0 ? DEFAULT_PORT : Integer.parseInt(args[0]));
    Handler fh = new FileHandler(LOG_FILENAME);
    logger.addHandler(fh);
    logger.finest("Test message");
    server.start();
    System.in.read();
    server.stop();
}
From source file:org.cloudifysource.azure.CliAzureDeploymentTest.java
@Test(timeout = 120 * 60 * 1000L)
public void repeatTest() throws Throwable {
    DateFormat df = new SimpleDateFormat("_yyyy-MM-dd_hh-mm");
    int repeat = 1;
    for (int i = 1; i <= repeat; i++) {
        // overwrites any existing file with that name.
        String filePattern = "azuretest" + i + df.format(new Date()) + ".log";
        FileHandler fileHandler = new FileHandler(filePattern);
        fileHandler.setFormatter(new SimpleFormatter());
        logger.addHandler(fileHandler);
        logger.info("Starting test iteration #" + i);
        boolean failed = false;
        try {
            before();
            test();
        } catch (Throwable t) {
            failed = true;
            throw t;
        } finally {
            if (failed) {
                logger.info("Failed test iteration #" + i
                        + ". Machines are left running for manual diagnostics");
                logger.removeHandler(fileHandler);
                try {
                    SimpleMail.send("Azure test failed\nSubscription ID="
                            + credentials.getHostedServicesSubscriptionId(), new File(filePattern));
                } catch (Exception e) {
                    logger.log(Level.SEVERE, "Failed to send email", e);
                }
                after();
                // no need to break since an exception was raised and is going to fail the test
            } else {
                logger.info("Passed test iteration #" + i);
                logger.removeHandler(fileHandler);
                try {
                    SimpleMail.send("Azure test passed\nSubscription ID="
                            + credentials.getHostedServicesSubscriptionId(), new File(filePattern));
                } catch (Exception e) {
                    logger.log(Level.SEVERE, "Failed to send email", e);
                }
                after();
                // no need to break since we want to test the run multiple times (or until it fails)
            }
        }
    }
}
From source file:BSxSB.Controllers.AdminController.java
@RequestMapping(value = "/deleteschool", method = RequestMethod.POST)
public String deleteSchool(Model model, @RequestParam(value = "schoolID") int schoolID) {
    SchoolDAO.deleteSchool(schoolID);
    try {
        // Initialize the file that the logger writes to.
        Handler handler = new FileHandler("%tBSxSBAdminSchools.log");
        handler.setFormatter(new SimpleFormatter());
        logger.addHandler(handler);
        logger.info("Admin Viewing List of Schools.");
        SchoolDAO schoolDAO = new SchoolDAO();
        ScheduleBlockDAO scheduleBlockDAO = new ScheduleBlockDAO();
        List<Schools> schools = schoolDAO.allSchools();
        logger.info("Returning list of schools..." + schools.size() + " schools found.");
        for (Schools school : schools) {
            List<Scheduleblocks> scheduleBlocks = scheduleBlockDAO
                    .getSchoolsScheduleBlocks(school.getSchoolid());
            String SB2Strings = "";
            for (Scheduleblocks sb : scheduleBlocks) {
                SB2Strings += sb.toString();
            }
            school.setScheduleblocks(SB2Strings);
        }
        model.addAttribute("school", schools);
        logger.info("Schools successfully added to model.");
        handler.close();
    } catch (IOException ex) {
        logger.log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        logger.log(Level.SEVERE, null, ex);
    }
    return "admin";
}
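The pattern string above begins with "%t", a FileHandler pattern token that expands to the system temporary directory; the FileHandler javadoc also defines "%h" (the user's home directory), "%g" (the generation number of rotated files), and "%u" (a unique number used to resolve conflicts). A minimal sketch of a size-rotated handler using these tokens; the class name, file name, size limit, and file count below are placeholder values, not taken from the example above.

import java.io.IOException;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class RotatingFileHandlerSketch {
    public static void main(String[] args) throws IOException {
        // Rotate across 5 files of at most 1 MiB each, appending on restart.
        // "%t" = temp directory, "%g" = generation number of the rotated file.
        FileHandler handler = new FileHandler("%t/myapp-%g.log", 1024 * 1024, 5, true);
        handler.setFormatter(new SimpleFormatter());
        Logger logger = Logger.getLogger("rotating-example");
        logger.addHandler(handler);
        logger.info("logging to a rotated file in the temp directory");
        handler.close();
    }
}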
From source file:com.frostvoid.trekwar.server.TrekwarServer.java
/**
 * Initiates logging
 *
 * @throws IOException
 */
private static void initLogging() throws IOException {
    FileHandler fh = new FileHandler(galaxyFileName + ".log");
    fh.setLevel(LOG.getLevel());
    Formatter logFormat = new Formatter() {
        @Override
        public String format(LogRecord rec) {
            StringBuilder buf = new StringBuilder(200);
            buf.append("#");
            buf.append(new java.util.Date());
            buf.append(' ');
            buf.append(rec.getLevel());
            buf.append(' ');
            buf.append(rec.getSourceClassName()).append(".").append(rec.getSourceMethodName());
            buf.append(":\n");
            buf.append(formatMessage(rec));
            buf.append('\n');
            return buf.toString();
        }
    };
    fh.setFormatter(logFormat);

    ConsoleHandler ch = new ConsoleHandler();
    ch.setLevel(LOG.getLevel());
    Formatter conlogFormat = new Formatter() {
        @Override
        public String format(LogRecord rec) {
            StringBuilder buf = new StringBuilder(200);
            buf.append(rec.getLevel());
            buf.append(": ");
            buf.append(formatMessage(rec));
            buf.append('\n');
            return buf.toString();
        }
    };
    ch.setFormatter(conlogFormat);

    LOG.addHandler(fh);
    LOG.addHandler(ch);
}
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
/**
 * Harvest an individual Dataverse
 *
 * @param dataverseId
 */
public void doHarvesting(Long dataverseId) throws IOException {
    HarvestingDataverse dataverse = em.find(HarvestingDataverse.class, dataverseId);
    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);

    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean."
            + dataverse.getVdc().getAlias() + logTimestamp);
    String logFileName = FileUtil.getImportFileDir() + File.separator + "harvest_"
            + dataverse.getVdc().getAlias() + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.addHandler(fileHandler);
    List<Long> harvestedStudyIds = null;

    this.processedSizeThisBatch = 0;
    this.harvestedStudyIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    try {
        boolean harvestingNow = dataverse.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse "
                    + dataverse.getVdc().getName() + " is currently being harvested.");
        } else {
            harvestingDataverseService.resetHarvestingStatus(dataverse.getId());
            String until = null; // If we don't set until date, we will get all the changes since the last harvest.
            String from = null;
            Date lastSuccessfulHarvestTime = dataverse.getLastSuccessfulHarvestTime();
            if (lastSuccessfulHarvestTime != null) {
                from = formatter.format(lastSuccessfulHarvestTime);
            }
            if (dataverse.isOai() || dataverse.isNesstar()) {
                harvestingDataverseService.setHarvestingNow(dataverse.getId(), true);
                Date currentTime = new Date();
                harvestingDataverseService.setLastHarvestTime(dataverse.getId(), currentTime);

                hdLogger.log(Level.INFO,
                        "BEGIN HARVEST..., oaiUrl=" + dataverse.getServerUrl() + ",set="
                                + dataverse.getHarvestingSet() + ", metadataPrefix="
                                + dataverse.getHarvestFormatType().getMetadataPrefix()
                                + ", from=" + from + ", until=" + until);

                if (dataverse.isOai()) {
                    harvestedStudyIds = harvestOAI(dataverse, hdLogger, from, until, harvestErrorOccurred,
                            failedIdentifiers);
                } else {
                    harvestedStudyIds = harvestNesstar(dataverse, hdLogger, harvestErrorOccurred,
                            failedIdentifiers);
                }
                harvestingDataverseService.setHarvestSuccess(dataverse.getId(), currentTime,
                        harvestedStudyIds.size(), failedIdentifiers.size());
                hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + dataverse.getServerUrl()
                        + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());

                if (harvestedStudyIds.size() > 0) {
                    harvestingDataverseService.setHarvestSuccessNotEmpty(dataverse.getId(), currentTime,
                            harvestedStudyIds.size(), failedIdentifiers.size());
                    hdLogger.log(Level.INFO, "COMPLETED HARVEST with results");
                }
                // now index all studies (need to modify for update)
                if (this.processedSizeThisBatch > 0) {
                    hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                    if (this.harvestedStudyIdsThisBatch != null) {
                        hdLogger.log(Level.INFO,
                                this.harvestedStudyIdsThisBatch.size() + " studies in the batch");
                    }
                    hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                    indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                    hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
                } else {
                    hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
                }
            } else {
                harvestErrorOccurred.setValue(true);
                harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                        failedIdentifiers.size());
                hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Unknown harvest type.");
            }
        }
        mailService.sendHarvestNotification(vdcNetworkService.find().getSystemEmail(),
                dataverse.getVdc().getName(), logFileName, logTimestamp,
                harvestErrorOccurred.booleanValue(), harvestedStudyIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + dataverse.getServerUrl() + ",format="
                + dataverse.getHarvestFormatType().getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                failedIdentifiers.size());
    } finally {
        harvestingDataverseService.setHarvestingNow(dataverse.getId(), false);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
    }
}
From source file:BSxSB.Controllers.AdminController.java
@RequestMapping(value = "/acceptaccount", method = RequestMethod.POST)
public String acceptAccount(Model model, @RequestParam(value = "email") String email) {
    try {
        // Initialize the file that the logger writes to.
        Handler handler = new FileHandler("%tBSxSBAdminStudentAccts.log");
        logger.addHandler(handler);
        handler.setFormatter(new SimpleFormatter());
        StudentDAO.acceptAccount(email);
        Students student = StudentDAO.getStudent(email);
        EmailNotification.sendEmail(student.getEmail(), student.getFirstname());
        List<Students> accountrequests = StudentDAO.getAccountRequests();
        model.addAttribute("accountrequests", accountrequests);
        logger.info("Successfully accepted: " + email);
        logger.info("Account successfully updated to model");
        handler.close();
    } catch (IOException ex) {
        logger.log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        logger.log(Level.SEVERE, null, ex);
    }
    return "adminmanagerequests";
}
From source file:BSxSB.Controllers.AdminController.java
@RequestMapping(value = "/rejectaccount", method = RequestMethod.POST)
public String rejectAccount(Model model, @RequestParam(value = "email") String email) {
    try {
        // Initialize the file that the logger writes to.
        Handler handler = new FileHandler("%tBSxSBAdminStudentAccts.log");
        logger.addHandler(handler);
        handler.setFormatter(new SimpleFormatter());
        StudentDAO.deleteAccount(email);
        List<Students> accountrequests = StudentDAO.getAccountRequests();
        model.addAttribute("accountrequests", accountrequests);
        logger.info("Successfully rejected: " + email);
        logger.info("Accounts successfully updated to model");
        handler.close();
    } catch (IOException ex) {
        logger.log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        logger.log(Level.SEVERE, null, ex);
    }
    return "adminmanagerequests";
}
From source file:com.diversityarrays.kdxplore.KDXplore.java
static private java.util.logging.Logger establishLogger(ApplicationFolder appFolder) {
    // Establish logger
    java.util.logging.Logger logger = null;
    try {
        File applicationFolder = appFolder.getApplicationFolder();

        logger = Shared.Log.getLogger();
        if (logger == null) {
            String kdxploreLog = appFolder.getApplicationName().toLowerCase() + ".log"; //$NON-NLS-1$
            File logFile = new File(applicationFolder, kdxploreLog);
            if (logFile.exists()) {
                File bakFile = new File(applicationFolder, kdxploreLog + ".bak"); //$NON-NLS-1$
                if (bakFile.exists()) {
                    bakFile.delete();
                }
                logFile.renameTo(bakFile);
            }
            java.util.logging.FileHandler fh = new FileHandler(kdxploreLog);
            fh.setFormatter(new SimpleFormatter());

            logger = java.util.logging.Logger.getLogger(appFolder.getApplicationName());
            logger.addHandler(fh);
            Shared.Log.setLogger(logger);

            logger.info("==== Log Started ===="); //$NON-NLS-1$
        }

        ExplorerProperties.getInstance(applicationFolder);
    } catch (IOException e1) {
        JOptionPane.showMessageDialog(null, e1.getMessage(), "Unable to initialise environment", //$NON-NLS-1$
                JOptionPane.ERROR_MESSAGE);
        System.exit(1);
    }
    return logger;
}
From source file:BSxSB.Controllers.AdminController.java
@RequestMapping(value = "/acceptallaccount", method = RequestMethod.POST)
public String acceptAllAccount(Model model) {
    try {
        // Initialize the file that the logger writes to.
        Handler handler = new FileHandler("%tBSxSBAdminStudentAccts.log");
        logger.addHandler(handler);
        handler.setFormatter(new SimpleFormatter());
        StudentDAO.acceptAllAccount();
        List<Students> accountrequests = StudentDAO.getAccountRequests();
        model.addAttribute("accountrequests", accountrequests);
        logger.info("Successfully accepted all accounts");
        logger.info("Accounts successfully updated to model");
        handler.close();
    } catch (IOException ex) {
        logger.log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        logger.log(Level.SEVERE, null, ex);
    }
    return "adminmanagerequests";
}