List of usage examples for java.util.logging.Logger.addHandler(Handler)

Method signature:

    public void addHandler(Handler handler) throws SecurityException

addHandler registers a Handler to receive this logger's log records; by default the logger also forwards records to its parent logger's handlers. A SecurityException is thrown if a security manager is present and the caller lacks LoggingPermission("control").
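Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: create a handler, set a formatter, register it with addHandler, and detach and close it when done. The class name and the "app.log" path are placeholders, not part of any of the projects below.

    import java.io.IOException;
    import java.util.logging.FileHandler;
    import java.util.logging.Level;
    import java.util.logging.Logger;
    import java.util.logging.SimpleFormatter;

    public class AddHandlerDemo {
        public static void main(String[] args) throws IOException {
            Logger logger = Logger.getLogger(AddHandlerDemo.class.getName());
            logger.setLevel(Level.ALL);

            // Attach a file handler ("app.log" is just a placeholder path).
            FileHandler fileHandler = new FileHandler("app.log", true);
            fileHandler.setFormatter(new SimpleFormatter());
            logger.addHandler(fileHandler);

            logger.info("routed to app.log and to the parent logger's handlers");

            // Detach and close the handler when it is no longer needed.
            logger.removeHandler(fileHandler);
            fileHandler.close();
        }
    }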
From source file:org.apache.directory.studio.connection.core.io.jndi.LdifSearchLogger.java
    /**
     * Inits the search logger.
     */
    private void initSearchLogger(Connection connection) {
        Logger logger = Logger.getAnonymousLogger();
        loggers.put(connection.getId(), logger);
        logger.setLevel(Level.ALL);

        String logfileName = ConnectionManager.getSearchLogFileName(connection);
        try {
            FileHandler fileHandler = new FileHandler(logfileName, getFileSizeInKb() * 1000, getFileCount(),
                    true);
            fileHandlers.put(connection.getId(), fileHandler);
            fileHandler.setFormatter(new Formatter() {
                public String format(LogRecord record) {
                    return record.getMessage();
                }
            });
            logger.addHandler(fileHandler);
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
    /**
     * Harvest an individual Dataverse
     * @param dataverseId
     */
    public void doHarvesting(Long dataverseId) throws IOException {
        HarvestingDataverse dataverse = em.find(HarvestingDataverse.class, dataverseId);
        MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
        String logTimestamp = logFormatter.format(new Date());
        Logger hdLogger = Logger.getLogger("edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean."
                + dataverse.getVdc().getAlias() + logTimestamp);
        String logFileName = FileUtil.getImportFileDir() + File.separator + "harvest_"
                + dataverse.getVdc().getAlias() + logTimestamp + ".log";
        FileHandler fileHandler = new FileHandler(logFileName);
        hdLogger.addHandler(fileHandler);
        List<Long> harvestedStudyIds = null;
        this.processedSizeThisBatch = 0;
        this.harvestedStudyIdsThisBatch = new ArrayList<Long>();
        List<String> failedIdentifiers = new ArrayList<String>();
        try {
            boolean harvestingNow = dataverse.isHarvestingNow();
            if (harvestingNow) {
                harvestErrorOccurred.setValue(true);
                hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse "
                        + dataverse.getVdc().getName() + " is currently being harvested.");
            } else {
                harvestingDataverseService.resetHarvestingStatus(dataverse.getId());
                String until = null; // If we don't set until date, we will get all the changes since the last harvest.
                String from = null;
                Date lastSuccessfulHarvestTime = dataverse.getLastSuccessfulHarvestTime();
                if (lastSuccessfulHarvestTime != null) {
                    from = formatter.format(lastSuccessfulHarvestTime);
                }
                if (dataverse.isOai() || dataverse.isNesstar()) {
                    harvestingDataverseService.setHarvestingNow(dataverse.getId(), true);
                    Date currentTime = new Date();
                    harvestingDataverseService.setLastHarvestTime(dataverse.getId(), currentTime);
                    hdLogger.log(Level.INFO,
                            "BEGIN HARVEST..., oaiUrl=" + dataverse.getServerUrl() + ",set="
                                    + dataverse.getHarvestingSet() + ", metadataPrefix="
                                    + dataverse.getHarvestFormatType().getMetadataPrefix() + ", from=" + from
                                    + ", until=" + until);
                    if (dataverse.isOai()) {
                        harvestedStudyIds = harvestOAI(dataverse, hdLogger, from, until, harvestErrorOccurred,
                                failedIdentifiers);
                    } else {
                        harvestedStudyIds = harvestNesstar(dataverse, hdLogger, harvestErrorOccurred,
                                failedIdentifiers);
                    }
                    harvestingDataverseService.setHarvestSuccess(dataverse.getId(), currentTime,
                            harvestedStudyIds.size(), failedIdentifiers.size());
                    hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + dataverse.getServerUrl()
                            + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());
                    if (harvestedStudyIds.size() > 0) {
                        harvestingDataverseService.setHarvestSuccessNotEmpty(dataverse.getId(), currentTime,
                                harvestedStudyIds.size(), failedIdentifiers.size());
                        hdLogger.log(Level.INFO, "COMPLETED HARVEST with results");
                    }
                    // now index all studies (need to modify for update)
                    if (this.processedSizeThisBatch > 0) {
                        hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                        if (this.harvestedStudyIdsThisBatch != null) {
                            hdLogger.log(Level.INFO,
                                    this.harvestedStudyIdsThisBatch.size() + " studies in the batch");
                        }
                        hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                        indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                        hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
                    } else {
                        hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
                    }
                } else {
                    harvestErrorOccurred.setValue(true);
                    harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                            failedIdentifiers.size());
                    hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Unknown harvest type.");
                }
            }
            mailService.sendHarvestNotification(vdcNetworkService.find().getSystemEmail(),
                    dataverse.getVdc().getName(), logFileName, logTimestamp,
                    harvestErrorOccurred.booleanValue(), harvestedStudyIds.size(), failedIdentifiers);
        } catch (Throwable e) {
            harvestErrorOccurred.setValue(true);
            String message = "Exception processing harvest, server= " + dataverse.getServerUrl() + ",format="
                    + dataverse.getHarvestFormatType().getMetadataPrefix() + " " + e.getClass().getName() + " "
                    + e.getMessage();
            hdLogger.log(Level.SEVERE, message);
            logException(e, hdLogger);
            hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
            harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                    failedIdentifiers.size());
        } finally {
            harvestingDataverseService.setHarvestingNow(dataverse.getId(), false);
            fileHandler.close();
            hdLogger.removeHandler(fileHandler);
        }
    }
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
    /**
     * Creates a new logger
     */
    private Logger init(final Level level, final String file) {
        final LogSettings logSettings = settingsService.getLogSettings();
        final Logger logger = Logger.getAnonymousLogger();
        logger.setLevel(level);
        logger.setUseParentHandlers(false);
        try {
            final FileUnits units = logSettings.getMaxLengthPerFileUnits();
            final FileHandler fileHandler = new FileHandler(file,
                    units.calculate(logSettings.getMaxLengthPerFile()), logSettings.getMaxFilesPerLog(), true);
            fileHandler.setFormatter(logFormatter);
            fileHandler.setEncoding(settingsService.getLocalSettings().getCharset());
            logger.addHandler(fileHandler);
        } catch (final Exception e) {
            final ConsoleHandler consoleHandler = new ConsoleHandler();
            consoleHandler.setFormatter(logFormatter);
            try {
                consoleHandler.setEncoding(settingsService.getLocalSettings().getCharset());
            } catch (final Exception e1) {
                // Just ignore
            }
            logger.addHandler(consoleHandler);
            logger.log(Level.WARNING, "Unable to create logger for file " + file);
        }
        return logger;
    }
From source file:com.yahoo.dba.perf.myperf.common.MyPerfContext.java
    private void configureLogging() {
        Logger logger = Logger.getLogger("");
        try {
            logger.setLevel(Level.parse(getLogLevel()));
        } catch (Exception ex) {
            logger.setLevel(Level.INFO);
        }
        try {
            for (Handler h : logger.getHandlers()) {
                if (h instanceof java.util.logging.ConsoleHandler)
                    h.setLevel(Level.SEVERE);
            }
            String logRoot = System.getProperty("logPath", ".");
            java.util.logging.FileHandler fileHandler = new java.util.logging.FileHandler(
                    logRoot + File.separatorChar + getLogPath(), this.logFileSize, this.logFileCount);
            fileHandler.setLevel(logger.getLevel());
            fileHandler.setFormatter(new SimpleFormatter());
            logger.addHandler(fileHandler);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
From source file:org.deviceconnect.android.localoauth.LocalOAuth2Main.java
    /**
     * Constructor.
     */
    public LocalOAuth2Main(final android.content.Context context, final String dbName) {
        // Configure logging: verbose output only in debug builds.
        Logger logger = sLogger;
        if (BuildConfig.DEBUG) {
            AndroidHandler handler = new AndroidHandler(logger.getName());
            handler.setFormatter(new SimpleFormatter());
            handler.setLevel(Level.ALL);
            logger.addHandler(handler);
            logger.setLevel(Level.ALL);
        } else {
            logger.setLevel(Level.OFF);
        }

        mContext = context;

        // Open the local database.
        mDbHelper = new LocalOAuthOpenHelper(context, dbName);
        mDb = mDbHelper.getWritableDatabase();

        // Create the user, client and token managers.
        mUserManager = new SampleUserManager();
        mClientManager = new SQLiteClientManager(mDb);
        mTokenManager = new SQLiteTokenManager(mDb);

        // Add the default sample user.
        addUserData(SampleUser.LOCALOAUTH_USER, SampleUser.LOCALOAUTH_PASS);
        register(context);
    }
From source file:org.ebayopensource.turmeric.tools.errorlibrary.ErrorLibraryFileGenerationTest.java
    @Test
    public void testInvalidNumberOfEntryBetweenErrorDataAndErrorProperties2() throws Exception {
        testingdir.ensureEmpty();
        File rootDir = testingdir.getDir();
        File destDir = new File(rootDir, "gen-src");

        // @formatter:off
        String[] inputArgs1 = {
            "-gentype", "genTypeErrorLibAll",
            "-pr", rootDir.getAbsolutePath(),
            "-domain", "security",
            "-errorlibname", "TestErrorLibrary",
            "-dest", destDir.getAbsolutePath()
        };
        // @formatter:on

        copyErrorXmlToProjectRoot("Invalid2ErrorData_QA.xml", rootDir, "security");
        copyErrorPropertiesToProjectRoot("Invalid2QAErrors.properties", rootDir, "security");

        File propFile = createDomainPropertiesFile(rootDir, "TestErrorLibrary");
        Properties props = new Properties();
        props.setProperty("listOfDomains", "security");
        storeProps(propFile, props);

        Logger errlogger = Logger.getLogger(ErrorLibraryUtils.class.getPackage().getName());
        ExpectedLogMessage expectedLog = new ExpectedLogMessage();
        expectedLog.setExpectedMessage("The Errors.properties file has more "
                + "errors defined in addition to those existing in " + "ErrorData.xml.");
        errlogger.addHandler(expectedLog);
        try {
            performDirectCodeGen(inputArgs1);
            expectedLog.assertFoundMessage();
        } finally {
            errlogger.removeHandler(expectedLog);
        }
    }
From source file:processing.app.Base.java
    static public void initLogger() {
        Handler consoleHandler = new ConsoleLogger();
        consoleHandler.setLevel(Level.ALL);
        consoleHandler.setFormatter(new LogFormatter("%1$tl:%1$tM:%1$tS [%4$7s] %2$s: %5$s%n"));

        Logger globalLogger = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
        globalLogger.setLevel(consoleHandler.getLevel());

        // Remove default handlers
        Handler[] handlers = globalLogger.getHandlers();
        for (Handler handler : handlers) {
            globalLogger.removeHandler(handler);
        }
        Logger root = Logger.getLogger("");
        handlers = root.getHandlers();
        for (Handler handler : handlers) {
            root.removeHandler(handler);
        }

        globalLogger.addHandler(consoleHandler);

        Logger.getLogger("cc.arduino.packages.autocomplete").setParent(globalLogger);
        Logger.getLogger("br.com.criativasoft.cpluslibparser").setParent(globalLogger);
        Logger.getLogger(Base.class.getPackage().getName()).setParent(globalLogger);
    }
From source file:org.tigase.messenger.phone.pro.service.XMPPService.java
    public XMPPService() {
        Logger logger = Logger.getLogger("tigase.jaxmpp");
        Handler handler = new AndroidLoggingHandler();
        handler.setLevel(Level.ALL);
        logger.addHandler(handler);
        logger.setLevel(Level.ALL);
    }
From source file:com.cyberway.issue.crawler.framework.CrawlController.java
    private void setupLogFile(Logger logger, String filename, Formatter f, boolean shouldManifest)
            throws IOException, SecurityException {
        GenerationFileHandler fh = new GenerationFileHandler(filename, true, shouldManifest);
        fh.setFormatter(f);
        logger.addHandler(fh);
        addToManifest(filename, MANIFEST_LOG_FILE, shouldManifest);
        logger.setUseParentHandlers(false);
        this.fileHandlers.put(logger, fh);
    }
From source file:com.cyberway.issue.crawler.framework.CrawlController.java
    protected void rotateLogFiles(String generationSuffix) throws IOException {
        if (this.state != PAUSED && this.state != CHECKPOINTING) {
            throw new IllegalStateException("Pause crawl before requesting " + "log rotation.");
        }
        for (Iterator i = fileHandlers.keySet().iterator(); i.hasNext();) {
            Logger l = (Logger) i.next();
            GenerationFileHandler gfh = (GenerationFileHandler) fileHandlers.get(l);
            GenerationFileHandler newGfh = gfh.rotate(generationSuffix, CURRENT_LOG_SUFFIX);
            if (gfh.shouldManifest()) {
                addToManifest((String) newGfh.getFilenameSeries().get(1), MANIFEST_LOG_FILE,
                        newGfh.shouldManifest());
            }
            l.removeHandler(gfh);
            l.addHandler(newGfh);
            fileHandlers.put(l, newGfh);
        }
    }
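The rotation above relies on Heritrix's GenerationFileHandler; the underlying java.util.logging pattern is simply to register the replacement handler and then detach and close the old one. The following simplified sketch shows only that swap using the standard FileHandler (the method and file names are placeholders, not Heritrix's actual rotation logic):

    import java.io.IOException;
    import java.util.logging.FileHandler;
    import java.util.logging.Logger;

    public class HandlerSwapDemo {
        // Replaces the logger's current file handler with one writing to a new file.
        static FileHandler rotate(Logger logger, FileHandler oldHandler, String newFile) throws IOException {
            FileHandler newHandler = new FileHandler(newFile, true);
            newHandler.setFormatter(oldHandler.getFormatter());
            logger.addHandler(newHandler);    // start writing to the new file
            logger.removeHandler(oldHandler); // stop writing to the old one
            oldHandler.close();               // flush and release the old file
            return newHandler;
        }
    }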