List of usage examples for java.util.logging.Logger.log
public void log(Level level, String msg)
public void log(Level level, Supplier<String> msgSupplier)
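All of the collected examples below use the String overload. None exercises the Supplier overload, which defers message construction until the logger has confirmed the level is enabled. A minimal sketch of it, with a hypothetical expensiveState() helper standing in for any costly computation:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class SupplierLogDemo {
        private static final Logger LOGGER = Logger.getLogger(SupplierLogDemo.class.getName());

        public static void main(String[] args) {
            // The lambda is evaluated only if FINE is actually loggable for
            // this logger, so no explicit isLoggable() guard is needed
            // around the string concatenation.
            LOGGER.log(Level.FINE, () -> "Current state: " + expensiveState());
        }

        // Hypothetical helper standing in for an expensive computation.
        private static String expensiveState() {
            return "...";
        }
    }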
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
/** Logs a user login. */
public void traceLogin(final String remoteAddress, final User user, final String sessionId) {
    final Logger logger = getTraceLogger();
    final Level level = TraceLevel.SIMPLE.getLevel();
    if (logger.isLoggable(level)) {
        try {
            logger.log(level, remoteAddress + " - Login for " + user + " under session id " + sessionId);
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }
}
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
/** Logs a user logout. */
public void traceLogout(final String remoteAddress, final User user, final String sessionId) {
    final Logger logger = getTraceLogger();
    final Level level = TraceLevel.SIMPLE.getLevel();
    if (logger.isLoggable(level)) {
        try {
            logger.log(level, remoteAddress + " - Logout for " + user + " under session id " + sessionId);
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }
}
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
/** Logs a permission-denied error on action execution. */
public void logPermissionDenied(final User user, final Method method, final Object[] args) {
    final Logger logger = getTraceLogger();
    final Level level = TraceLevel.ERRORS.getLevel();
    if (logger.isLoggable(level)) {
        try {
            logger.log(level, "Permission denied for " + buildActionString(user, null, method, args, null, false));
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }
}
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
/** Logs that a request has been rejected because the account status queue is full. */
public void logRequestRejectedOnSystemOverloaded(final String uri, final String remoteAddress) {
    final Logger logger = getTraceLogger();
    final Level level = TraceLevel.ERRORS.getLevel();
    if (logger.isLoggable(level)) {
        try {
            logger.log(level, "A request to " + uri + " from " + remoteAddress
                    + " was rejected because the system is overloaded");
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }
}
From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java
private void log(final Logger logger, final Level logLevel, final String logFile, final String remoteAddress,
        final String message) {
    if (logLevel != null) {
        try {
            final String prefix = StringUtils.isEmpty(remoteAddress) ? "" : remoteAddress + " - ";
            logger.log(logLevel, prefix + message);
        } catch (final Exception e) {
            System.out.printf("Error generating log on %1$s: %2$s%n", logFile, e.getMessage());
        }
    }
}
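This helper builds the prefixed message eagerly, even when the level turns out to be disabled for the logger. A variant using the Supplier overload from the signature above would defer the concatenation; a minimal sketch, not from the Cyclos source:

    private void log(final Logger logger, final Level logLevel, final String logFile,
            final String remoteAddress, final String message) {
        if (logLevel != null) {
            try {
                // The lambda runs only if logLevel is enabled for this logger,
                // so the prefix + message concatenation is skipped otherwise.
                logger.log(logLevel, () -> {
                    final String prefix = StringUtils.isEmpty(remoteAddress) ? "" : remoteAddress + " - ";
                    return prefix + message;
                });
            } catch (final Exception e) {
                System.out.printf("Error generating log on %1$s: %2$s%n", logFile, e.getMessage());
            }
        }
    }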
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
private void handleOAIError(Logger hdLogger, OAIPMHtype oaiObj, String message) {
    for (Iterator it = oaiObj.getError().iterator(); it.hasNext();) {
        OAIPMHerrorType error = (OAIPMHerrorType) it.next();
        // Build the message per error, so earlier error codes are not
        // repeated in each subsequent log entry.
        String errorMessage = message + ", error code: " + error.getCode()
                + ", error value: " + error.getValue();
        hdLogger.log(Level.SEVERE, errorMessage);
    }
}
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public Long getRecord(Logger hdLogger, HarvestingDataverse dataverse, String identifier,
        String metadataPrefix, MutableBoolean recordErrorOccurred) {
    String errMessage = null;
    Study harvestedStudy = null;
    String oaiUrl = dataverse.getServerUrl();
    try {
        hdLogger.log(Level.INFO, "Calling GetRecord: oaiUrl =" + oaiUrl + "?verb=GetRecord&identifier="
                + identifier + "&metadataPrefix=" + metadataPrefix);
        DvnFastGetRecord record = new DvnFastGetRecord(oaiUrl, identifier, metadataPrefix);
        errMessage = record.getErrorMessage();
        if (errMessage != null) {
            hdLogger.log(Level.SEVERE, "Error calling GetRecord - " + errMessage);
        } else if (record.isDeleted()) {
            hdLogger.log(Level.INFO, "Received 'deleted' status from OAI Server.");
            Study study = studyService.getStudyByHarvestInfo(dataverse.getVdc(), identifier);
            if (study != null) {
                hdLogger.log(Level.INFO, "Deleting study " + study.getGlobalId());
                studyService.deleteStudy(study.getId());
            } else {
                hdLogger.log(Level.INFO, "No study found for this record, skipping delete.");
            }
        } else {
            hdLogger.log(Level.INFO, "Successfully retrieved GetRecord response.");
            VDCUser networkAdmin = vdcNetworkService.find().getDefaultNetworkAdmin();
            harvestedStudy = studyService.importHarvestStudy(record.getMetadataFile(),
                    dataverse.getVdc().getId(), networkAdmin.getId(), identifier);
            hdLogger.log(Level.INFO, "Harvest Successful for identifier " + identifier);
            this.processedSizeThisBatch += record.getMetadataFile().length();
            if (this.harvestedStudyIdsThisBatch == null) {
                this.harvestedStudyIdsThisBatch = new ArrayList<Long>();
            }
            this.harvestedStudyIdsThisBatch.add(harvestedStudy.getId());
            if (this.processedSizeThisBatch > 10000000) {
                hdLogger.log(Level.INFO, "REACHED CONTENT BATCH SIZE LIMIT; calling index ("
                        + this.harvestedStudyIdsThisBatch.size() + " studies in the batch).");
                indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                hdLogger.log(Level.INFO, "REINDEX DONE.");
                this.processedSizeThisBatch = 0;
                this.harvestedStudyIdsThisBatch = null;
            }
        }
    } catch (Throwable e) {
        errMessage = "Exception processing getRecord(), oaiUrl=" + oaiUrl + ",identifier=" + identifier
                + " " + e.getClass().getName() + " " + e.getMessage();
        hdLogger.log(Level.SEVERE, errMessage);
        logException(e, hdLogger);
    }
    // If we got an error from the OAI server or an exception happened during import,
    // set recordErrorOccurred to true (if recordErrorOccurred is being used);
    // otherwise throw an exception (if recordErrorOccurred is not used, i.e. null).
    if (errMessage != null) {
        if (recordErrorOccurred != null) {
            recordErrorOccurred.setValue(true);
        } else {
            throw new EJBException(errMessage);
        }
    }
    return harvestedStudy != null ? harvestedStudy.getId() : null;
}
From source file:org.conf4j.service.ConfServiceInstance.java
@Override
public final void initFolders() {
    final Logger log = Logger.getLogger(LOGGER_CAT);
    final List<String> propertyNames = getKeys();
    for (int i = 0; i < propertyNames.size(); i++) {
        final String propertyName = propertyNames.get(i);
        if (!propertyName.endsWith("_dir")) {
            continue;
        }
        final String folderPath = getValue(propertyName);
        if (folderPath == null || folderPath.trim().length() <= 0) {
            log.info(DIR_VAR_0_NOT_SET.format(new String[] { propertyName }));
            continue;
        }
        final File folderFile = new File(folderPath);
        try {
            if (!folderFile.exists()) {
                final boolean success = folderFile.mkdirs();
                if (success) {
                    log.log(Level.INFO, DIR_VAR_0_PATH_1_CREATED_ABSPATH_2
                            .format(new String[] { propertyName, folderPath, folderFile.getAbsolutePath() }));
                } else {
                    log.log(Level.SEVERE,
                            DIR_VAR_0_PATH_1_CREATION_FAILED.format(new String[] { propertyName, folderPath }));
                }
            } else {
                log.log(Level.INFO, DIR_VAR_0_PATH_1_EXISTS.format(new String[] { propertyName, folderPath }));
            }
        } catch (SecurityException e) {
            log.log(Level.WARNING,
                    DIR_VAR_0_PATH_1_CHECKS_FAILURE.format(new String[] { propertyName, folderPath }));
        }
    }
}
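This example pre-formats its messages with MessageFormat patterns before calling log. java.util.logging can also do the substitution itself via the log(Level, String, Object[]) overload, which skips the formatting work when the level is disabled. A minimal sketch, independent of the conf4j source, with hypothetical placeholder values:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class ParamLogDemo {
        private static final Logger LOGGER = Logger.getLogger(ParamLogDemo.class.getName());

        public static void main(String[] args) {
            // The {0}/{1} placeholders are filled in by the logging framework,
            // and only if INFO is actually loggable for this logger.
            LOGGER.log(Level.INFO, "Directory variable {0} points to {1}",
                    new Object[] { "data_dir", "/var/data" });
        }
    }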
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
private List<Long> harvestNesstar(HarvestingDataverse dataverse, Logger hdLogger,
        MutableBoolean harvestErrorOccurred, List<String> failedIdentifiers) throws MalformedURLException {
    VDCUser networkAdmin = vdcNetworkService.find().getDefaultNetworkAdmin();
    int count = 0;
    List<Long> harvestedStudyIds = new ArrayList<Long>();
    hdLogger.log(Level.INFO, "BEGIN HARVEST..., nesstarServer=" + dataverse.getServerUrl()
            + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());
    // Instantiate the NesstarHarvester class:
    NesstarHarvester nh = new NesstarHarvester();
    // Add a server (remember to use a standards-compliant URL):
    nh.addServer(dataverse.getServerUrl());
    hdLogger.log(Level.INFO,
            "Created an instance of NesstarHarvester; about to start harvest/retrieval of Nesstar DDIs");
    // Harvest the server:
    DDI[] ddis = nh.harvest();
    hdLogger.log(Level.INFO, "Completed NesstarHarvester.harvest()");
    if (ddis != null) {
        hdLogger.log(Level.INFO, "NesstarHarvester.harvest() returned a list of " + ddis.length
                + " DDIs; attempting to import.");
        for (DDI ddi : ddis) {
            count++;
            Writer out = null;
            try {
                File xmlFile = File.createTempFile("study", ".xml");
                out = new BufferedWriter(new FileWriter(xmlFile));
                out.write(ddi.getXml());
                out.close();
                Study harvestedStudy = studyService.importHarvestStudy(xmlFile, dataverse.getVdc().getId(),
                        networkAdmin.getId(), null);
                if (harvestedStudy != null) {
                    hdLogger.log(Level.INFO,
                            "local (database) id for the imported Nesstar study: " + harvestedStudy.getId());
                    if (harvestedStudy.getId() != null && !(harvestedStudy.getId().equals(""))) {
                        if (!(harvestedStudyIds.contains(harvestedStudy.getId()))) {
                            harvestedStudyIds.add(harvestedStudy.getId());
                        } else {
                            hdLogger.log(Level.INFO,
                                    "Note: id " + harvestedStudy.getId() + " is already on the map - skipping!");
                        }
                    } else {
                        hdLogger.log(Level.WARNING, "Note: importHarvestStudy returned null or empty study id!");
                    }
                } else {
                    hdLogger.log(Level.WARNING, "importHarvestStudy() returned null study!");
                }
            } catch (Exception e) {
                String errMessage = "Exception Importing Nesstar DDI, identifier not available, "
                        + "sequential number in the harvest batch: " + String.valueOf(count) + " "
                        + e.getClass().getName() + " " + e.getMessage();
                hdLogger.log(Level.SEVERE, errMessage);
                failedIdentifiers.add(String.valueOf(count));
            }
        }
    } else {
        hdLogger.log(Level.WARNING, "NesstarHarvester.harvest() returned a null list of DDIs");
    }
    return harvestedStudyIds;
}
From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, PrintWriter importCleanupLog,
        OaiHandler oaiHandler, String identifier, MutableBoolean recordErrorOccurred,
        MutableLong processedSizeThisBatch, List<String> deletedIdentifiers) {
    String errMessage = null;
    Dataset harvestedDataset = null;
    logGetRecord(hdLogger, oaiHandler, identifier);
    File tempFile = null;
    try {
        FastGetRecord record = oaiHandler.runGetRecord(identifier);
        errMessage = record.getErrorMessage();
        if (errMessage != null) {
            hdLogger.log(Level.SEVERE, "Error calling GetRecord - " + errMessage);
        } else if (record.isDeleted()) {
            hdLogger.info("Deleting harvesting dataset for " + identifier
                    + ", per the OAI server's instructions.");
            Dataset dataset = datasetService
                    .getDatasetByHarvestInfo(oaiHandler.getHarvestingClient().getDataverse(), identifier);
            if (dataset != null) {
                hdLogger.info("Deleting dataset " + dataset.getGlobalId());
                deleteHarvestedDataset(dataset, dataverseRequest, hdLogger);
                // TODO: check the status of that delete - see if it actually succeeded
                deletedIdentifiers.add(identifier);
            } else {
                hdLogger.info("No dataset found for " + identifier + ", skipping delete.");
            }
        } else {
            hdLogger.info("Successfully retrieved GetRecord response.");
            tempFile = record.getMetadataFile();
            harvestedDataset = importService.doImportHarvestedDataset(dataverseRequest,
                    oaiHandler.getHarvestingClient(), identifier, oaiHandler.getMetadataPrefix(),
                    record.getMetadataFile(), importCleanupLog);
            hdLogger.fine("Harvest Successful for identifier " + identifier);
            hdLogger.fine("Size of this record: " + record.getMetadataFile().length());
            processedSizeThisBatch.add(record.getMetadataFile().length());
        }
    } catch (Throwable e) {
        logGetRecordException(hdLogger, oaiHandler, identifier, e);
        errMessage = "Caught exception while executing GetRecord on " + identifier;
    } finally {
        if (tempFile != null) {
            // Temporary: don't delete the temp metadata file if anything went wrong, for now.
            if (errMessage == null) {
                try {
                    tempFile.delete();
                } catch (Throwable t) {
                }
            }
        }
    }
    // If we got an error from the OAI server or an exception happened during import,
    // set recordErrorOccurred to true (if recordErrorOccurred is being used);
    // otherwise throw an exception (if recordErrorOccurred is not used, i.e. null).
    if (errMessage != null) {
        if (recordErrorOccurred != null) {
            recordErrorOccurred.setValue(true);
        } else {
            throw new EJBException(errMessage);
        }
    }
    return harvestedDataset != null ? harvestedDataset.getId() : null;
}
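This newer Dataverse version mixes log(Level, ...) calls with the convenience methods info() and fine(); the forms are equivalent, and on Java 8+ the convenience methods also accept a Supplier. A minimal sketch, independent of the Dataverse source, with a hypothetical identifier value:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class EquivalentCallsDemo {
        private static final Logger LOGGER = Logger.getLogger(EquivalentCallsDemo.class.getName());

        public static void main(String[] args) {
            String identifier = "doi:10.1234/example"; // hypothetical record id
            // These two calls are equivalent:
            LOGGER.log(Level.INFO, "Harvest successful for identifier " + identifier);
            LOGGER.info("Harvest successful for identifier " + identifier);
            // The Supplier variant defers the concatenation until FINE is
            // confirmed to be loggable:
            LOGGER.fine(() -> "Size of this record: " + identifier.length());
        }
    }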