Example usage for java.util.logging Logger log

List of usage examples for java.util.logging Logger log

Introduction

On this page you can find example usage of the java.util.logging Logger.log method.

Prototype

public void log(Level level, Supplier<String> msgSupplier) 

Document

Log a message, which is only to be constructed if the logging level is such that the message will actually be logged.
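
A minimal, self-contained sketch of this overload (the class name and the expensiveDump helper are illustrative, not taken from the usage examples below). The Supplier lambda is evaluated only when the level is actually loggable, so the expensive message is never built for levels that are filtered out:

import java.util.logging.Level;
import java.util.logging.Logger;

public class LazyLogDemo {

    private static final Logger LOGGER = Logger.getLogger(LazyLogDemo.class.getName());

    public static void main(String[] args) {
        // FINER is below the default INFO threshold, so this Supplier is never invoked
        // and expensiveDump() is never called.
        LOGGER.log(Level.FINER, () -> "state dump: " + expensiveDump());

        // INFO is loggable by default, so this Supplier is evaluated and the message is logged.
        LOGGER.log(Level.INFO, () -> "started at " + System.currentTimeMillis());
    }

    // Illustrative stand-in for a costly message computation.
    private static String expensiveDump() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 1000; i++) {
            sb.append(i).append(' ');
        }
        return sb.toString();
    }
}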

Usage

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java

/**
 * Log an account fee transfer
 */
public void logAccountFeeInvoice(final Invoice invoice) {
    final Logger logger = getAccountFeeLogger();
    final Level level = AccountFeeLevel.DETAILED.getLevel();
    if (logger.isLoggable(level)) {
        final UnitsConverter unitsConverter = settingsService.getLocalSettings()
                .getUnitsConverter(invoice.getTransferType().getFrom().getCurrency().getPattern());
        final String message = "Sent invoice of %s from %s";
        final Object[] params = { unitsConverter.toString(invoice.getAmount()),
                invoice.getToMember().getUsername() };
        try {
            logger.log(level, String.format(message, params));
        } catch (final Exception e) {
            System.out
                    .println("Error generating log on " + settingsService.getLogSettings().getAccountFeeFile());
        }
    }
}
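
The snippet above guards an eagerly formatted message with isLoggable before calling log(Level, String). With the Supplier overload documented on this page that guard becomes implicit; a purely illustrative rewrite of the single logging call, reusing the snippet's local variables (not actual Cyclos code), would be:

    logger.log(level, () -> String.format("Sent invoice of %s from %s",
            unitsConverter.toString(invoice.getAmount()),
            invoice.getToMember().getUsername()));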

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

/**
 *
 * @param harvestingClient  the harvesting client object
 * @param hdLogger          custom logger (specific to this harvesting run)
 * @param harvestErrorOccurred  have we encountered any errors during harvest?
 * @param failedIdentifiers     Study Identifiers for failed "GetRecord" requests
 */
private List<Long> harvestOAI(DataverseRequest dataverseRequest, HarvestingClient harvestingClient,
        Logger hdLogger, PrintWriter importCleanupLog, MutableBoolean harvestErrorOccurred,
        List<String> failedIdentifiers, List<String> deletedIdentifiers,
        List<Long> harvestedDatasetIdsThisBatch)
        throws IOException, ParserConfigurationException, SAXException, TransformerException {

    logBeginOaiHarvest(hdLogger, harvestingClient);

    List<Long> harvestedDatasetIds = new ArrayList<Long>();
    MutableLong processedSizeThisBatch = new MutableLong(0L);
    OaiHandler oaiHandler;

    try {
        oaiHandler = new OaiHandler(harvestingClient);
    } catch (OaiHandlerException ohe) {
        String errorMessage = "Failed to create OaiHandler for harvesting client " + harvestingClient.getName()
                + "; " + ohe.getMessage();
        hdLogger.log(Level.SEVERE, errorMessage);
        throw new IOException(errorMessage);
    }

    try {
        for (Iterator<Header> idIter = oaiHandler.runListIdentifiers(); idIter.hasNext();) {

            Header h = idIter.next();
            String identifier = h.getIdentifier();

            hdLogger.info("processing identifier: " + identifier);

            MutableBoolean getRecordErrorOccurred = new MutableBoolean(false);

            // Retrieve and process this record with a separate GetRecord call:
            Long datasetId = processRecord(dataverseRequest, hdLogger, importCleanupLog, oaiHandler, identifier,
                    getRecordErrorOccurred, processedSizeThisBatch, deletedIdentifiers);

            hdLogger.info("Total content processed in this batch so far: " + processedSizeThisBatch);
            if (datasetId != null) {
                harvestedDatasetIds.add(datasetId);

                if (harvestedDatasetIdsThisBatch == null) {
                    harvestedDatasetIdsThisBatch = new ArrayList<Long>();
                }
                harvestedDatasetIdsThisBatch.add(datasetId);

            }

            if (getRecordErrorOccurred.booleanValue() == true) {
                failedIdentifiers.add(identifier);
                harvestErrorOccurred.setValue(true);
                //temporary:
                //throw new IOException("Exception occured, stopping harvest");
            }

            // reindexing in batches? - this is from DVN 3; 
            // we may not need it anymore. 
            if (processedSizeThisBatch.longValue() > INDEXING_CONTENT_BATCH_SIZE) {

                hdLogger.log(Level.INFO, "REACHED CONTENT BATCH SIZE LIMIT; calling index ("
                        + harvestedDatasetIdsThisBatch.size() + " datasets in the batch).");
                //indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "REINDEX DONE.");

                processedSizeThisBatch.setValue(0L);
                harvestedDatasetIdsThisBatch = null;
            }

        }
    } catch (OaiHandlerException e) {
        throw new IOException("Failed to run ListIdentifiers: " + e.getMessage());
    }

    logCompletedOaiHarvest(hdLogger, harvestingClient);

    return harvestedDatasetIds;

}

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java

/**
 * Logs an action trace
 */
public void trace(final String remoteAddress, final User user, final Permission permission, final Method method,
        final Object[] args, final Object retVal) {
    final Logger logger = getTraceLogger();
    final Level detailed = TraceLevel.DETAILED.getLevel();
    final Level normal = TraceLevel.SIMPLE.getLevel();
    final boolean detailedLoggable = logger.isLoggable(detailed);
    final boolean normalLoggable = logger.isLoggable(normal);
    final Level logLevel = detailedLoggable ? detailed : normalLoggable ? normal : null;
    if (logLevel != null) {
        final String prefix = StringUtils.isEmpty(remoteAddress) ? "" : remoteAddress + " - ";
        final String message = buildActionString(user, permission, method, args, retVal, true);
        try {
            logger.log(logLevel, prefix + message);
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }
}

From source file:edu.emory.cci.aiw.i2b2etl.dest.I2b2QueryResultsHandler.java

/**
 * Creates a new query results handler that will use the provided
 * configuration file. This constructor, through the
 * <code>inferPropositionIdsNeeded</code> parameter, lets you control
 * whether proposition ids to be returned from the Protempa processing run
 * should be inferred from the i2b2 configuration file.
 *
 * @param confXML an i2b2 query results handler configuration file. Cannot
 * be <code>null</code>.
 * @param inferPropositionIdsNeeded <code>true</code> if proposition ids to
 * be returned from the Protempa processing run should include all of those
 * specified in the i2b2 configuration file, <code>false</code> if the
 * proposition ids returned should be only those specified in the Protempa
 * {@link Query}.
 * @param dataInsertMode whether to truncate existing data or append to it
 */
I2b2QueryResultsHandler(Query query, DataSource dataSource, KnowledgeSource knowledgeSource,
        Configuration configuration, List<? extends ProtempaEventListener> eventListeners)
        throws QueryResultsHandlerInitException {
    if (dataSource == null) {
        throw new IllegalArgumentException("dataSource cannot be null");
    }
    if (knowledgeSource == null) {
        throw new IllegalArgumentException("knowledgeSource cannot be null");
    }
    Logger logger = I2b2ETLUtil.logger();
    this.query = query;
    this.knowledgeSource = knowledgeSource;
    this.configuration = configuration;
    logger.log(Level.FINE, String.format("Using configuration: %s", this.configuration.getName()));
    logger.log(Level.FINER, "STEP: read conf.xml");
    this.settings = this.configuration.getSettings();

    this.data = this.configuration.getData();
    this.conceptsSection = this.configuration.getConcepts();
    this.database = this.configuration.getDatabase();
    DatabaseSpec dataSchemaSpec = this.database.getDataSpec();
    if (dataSchemaSpec != null) {
        this.dataConnectionSpec = dataSchemaSpec.toConnectionSpec();
    } else {
        this.dataConnectionSpec = null;
    }

    DatabaseSpec metadataSchemaSpec = this.database.getMetadataSpec();
    if (metadataSchemaSpec != null) {
        this.metadataConnectionSpec = metadataSchemaSpec.toConnectionSpec();
    } else {
        this.metadataConnectionSpec = null;
    }

    this.providerFullNameSpec = this.data.get(this.settings.getProviderFullName());
    this.providerFirstNameSpec = this.data.get(this.settings.getProviderFirstName());
    this.providerMiddleNameSpec = this.data.get(this.settings.getProviderMiddleName());
    this.providerLastNameSpec = this.data.get(this.settings.getProviderLastName());
    this.visitPropId = this.settings.getVisitDimension();

    RemoveMethod removeMethod = this.settings.getDataRemoveMethod();
    if (removeMethod != null) {
        this.dataRemoveMethod = removeMethod;
    } else {
        this.dataRemoveMethod = RemoveMethod.TRUNCATE;
    }
    RemoveMethod metaRemoveMethod2 = this.settings.getMetaRemoveMethod();
    if (metaRemoveMethod2 != null) {
        this.metaRemoveMethod = metaRemoveMethod2;
    } else {
        this.metaRemoveMethod = RemoveMethod.TRUNCATE;
    }

    DataSourceBackend[] dsBackends = dataSource.getBackends();
    this.dataSourceBackendIds = new HashSet<>();
    for (int i = 0; i < dsBackends.length; i++) {
        String id = dsBackends[i].getId();
        if (id != null) {
            this.dataSourceBackendIds.add(id);
        }
    }
    String sourceSystemCd = this.settings.getSourceSystemCode();
    if (sourceSystemCd != null) {
        this.qrhId = sourceSystemCd;
    } else {
        this.qrhId = I2B2QueryResultsHandlerSourceId.getInstance().getStringRepresentation();
    }
    this.dataSourceBackendIds.add(this.qrhId);

    KnowledgeSourceBackend[] ksBackends = knowledgeSource.getBackends();
    this.knowledgeSourceBackendIds = new HashSet<>();
    for (int i = 0; i < ksBackends.length; i++) {
        String id = ksBackends[i].getId();
        if (id != null) {
            this.knowledgeSourceBackendIds.add(id);
        }
    }
    this.knowledgeSourceBackendIds.add(this.qrhId);

    this.eventListeners = eventListeners;
}

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandler.java

/**
 * Log a tax status change
 */
public void logAccountFeeStatus(final AccountFeeLog feeLog) {
    final Logger logger = getAccountFeeLogger();
    final Level level = AccountFeeLevel.STATUS.getLevel();
    if (logger.isLoggable(level)) {
        String status = null;
        switch (feeLog.getStatus()) {
        case RUNNING:
            status = "Started";
            break;
        case CANCELED:
            status = "Manually canceled";
            break;
        case NEVER_RAN:
            status = "Never ran";
            break;
        case PARTIALLY_FAILED:
            status = "Partially failed";
            break;
        case FINISHED:
            status = "Finished";
            break;
        }
        try {
            logger.log(level, feeLog.getAccountFee().getName() + ": " + status);
        } catch (final Exception e) {
            System.out
                    .println("Error generating log on " + settingsService.getLogSettings().getAccountFeeFile());
        }
    }
}

From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java

/**
 *
 * @param dataverse  the dataverse to harvest into
 * @param from       get updated studies from this beginning date
 * @param until      get updated studies until this end date
 * @param harvestErrorOccurred  have we encountered any errors during harvest?
 * @param failedIdentifiers     Study Identifiers for failed "GetRecord" requests
 */
private List<Long> harvestOAI(HarvestingDataverse dataverse, Logger hdLogger, String from, String until,
        MutableBoolean harvestErrorOccurred, List<String> failedIdentifiers)
        throws IOException, ParserConfigurationException, SAXException, TransformerException, JAXBException {

    List<Long> harvestedStudyIds = new ArrayList<Long>();

    ResumptionTokenType resumptionToken = null;

    do {
        //resumptionToken = harvesterService.harvestFromIdentifiers(hdLogger, resumptionToken, dataverse, from, until, harvestedStudyIds, failedIdentifiers, harvestErrorOccurred
        resumptionToken = harvestFromIdentifiers(hdLogger, resumptionToken, dataverse, from, until,
                harvestedStudyIds, failedIdentifiers, harvestErrorOccurred);
    } while (resumptionToken != null && !resumptionToken.equals(""));

    hdLogger.log(Level.INFO,
            "COMPLETED HARVEST, oaiUrl=" + dataverse.getServerUrl() + ",set=" + dataverse.getHarvestingSet()
                    + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix() + ", from="
                    + from + ", until=" + until);

    return harvestedStudyIds;

}

From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java

@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public ResumptionTokenType harvestFromIdentifiers(Logger hdLogger, ResumptionTokenType resumptionToken,
        HarvestingDataverse dataverse, String from, String until, List<Long> harvestedStudyIds,
        List<String> failedIdentifiers, MutableBoolean harvestErrorOccurred) throws java.io.IOException,
        ParserConfigurationException, SAXException, TransformerException, JAXBException {
    String encodedSet = dataverse.getHarvestingSet() == null ? null
            : URLEncoder.encode(dataverse.getHarvestingSet(), "UTF-8");
    ListIdentifiers listIdentifiers = null;

    if (resumptionToken == null) {
        listIdentifiers = new ListIdentifiers(dataverse.getServerUrl(), from, until, encodedSet,
                URLEncoder.encode(dataverse.getHarvestFormatType().getMetadataPrefix(), "UTF-8"));
    } else {
        hdLogger.log(Level.INFO, "harvestFromIdentifiers(), resumptionToken=" + resumptionToken.getValue());
        listIdentifiers = new ListIdentifiers(dataverse.getServerUrl(), resumptionToken.getValue());
    }

    Document doc = listIdentifiers.getDocument();

    //       JAXBContext jc = JAXBContext.newInstance("edu.harvard.hmdc.vdcnet.jaxb.oai");
    //       Unmarshaller unmarshaller = jc.createUnmarshaller();
    JAXBElement unmarshalObj = (JAXBElement) unmarshaller.unmarshal(doc);
    OAIPMHtype oaiObj = (OAIPMHtype) unmarshalObj.getValue();

    if (oaiObj.getError() != null && oaiObj.getError().size() > 0) {
        if (oaiObj.getError().get(0).getCode().equals(OAIPMHerrorcodeType.NO_RECORDS_MATCH)) {
            hdLogger.info("ListIdentifiers returned NO_RECORDS_MATCH - no studies found to be harvested.");
        } else {
            handleOAIError(hdLogger, oaiObj,
                    "calling listIdentifiers, oaiServer= " + dataverse.getServerUrl() + ",from=" + from
                            + ",until=" + until + ",encodedSet=" + encodedSet + ",format="
                            + dataverse.getHarvestFormatType().getMetadataPrefix());
            throw new EJBException("Received OAI Error response calling ListIdentifiers");
        }
    } else {
        ListIdentifiersType listIdentifiersType = oaiObj.getListIdentifiers();
        if (listIdentifiersType != null) {
            resumptionToken = listIdentifiersType.getResumptionToken();
            for (Iterator it = listIdentifiersType.getHeader().iterator(); it.hasNext();) {
                HeaderType header = (HeaderType) it.next();
                MutableBoolean getRecordErrorOccurred = new MutableBoolean(false);
                Long studyId = getRecord(hdLogger, dataverse, header.getIdentifier(),
                        dataverse.getHarvestFormatType().getMetadataPrefix(), getRecordErrorOccurred);
                if (studyId != null) {
                    harvestedStudyIds.add(studyId);
                }
                if (getRecordErrorOccurred.booleanValue() == true) {
                    failedIdentifiers.add(header.getIdentifier());
                }

            }

        }
    }
    String logMsg = "Returning from harvestFromIdentifiers";

    if (resumptionToken == null) {
        logMsg += " resumptionToken is null";
    } else if (!StringUtil.isEmpty(resumptionToken.getValue())) {
        logMsg += " resumptionToken is " + resumptionToken.getValue();
    } else {
        // Some OAIServers return an empty resumptionToken element when all
        // the identifiers have been sent, so need to check  for this, and 
        // treat it as if resumptiontoken is null.
        logMsg += " resumptionToken is empty, setting return value to null.";
        resumptionToken = null;
    }
    hdLogger.info(logMsg);
    return resumptionToken;
}

From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java

/**
 * Harvest an individual Dataverse
 * @param dataverseId
 */
public void doHarvesting(Long dataverseId) throws IOException {
    HarvestingDataverse dataverse = em.find(HarvestingDataverse.class, dataverseId);
    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean."
            + dataverse.getVdc().getAlias() + logTimestamp);
    String logFileName = FileUtil.getImportFileDir() + File.separator + "harvest_"
            + dataverse.getVdc().getAlias() + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.addHandler(fileHandler);
    List<Long> harvestedStudyIds = null;

    this.processedSizeThisBatch = 0;
    this.harvestedStudyIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    try {
        boolean harvestingNow = dataverse.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + dataverse.getVdc().getName()
                    + " is currently being harvested.");

        } else {
            harvestingDataverseService.resetHarvestingStatus(dataverse.getId());
            String until = null; // If we don't set until date, we will get all the changes since the last harvest.
            String from = null;
            Date lastSuccessfulHarvestTime = dataverse.getLastSuccessfulHarvestTime();
            if (lastSuccessfulHarvestTime != null) {
                from = formatter.format(lastSuccessfulHarvestTime);
            }
            if (dataverse.isOai() || dataverse.isNesstar()) {
                harvestingDataverseService.setHarvestingNow(dataverse.getId(), true);
                Date currentTime = new Date();
                harvestingDataverseService.setLastHarvestTime(dataverse.getId(), currentTime);

                hdLogger.log(Level.INFO,
                        "BEGIN HARVEST..., oaiUrl=" + dataverse.getServerUrl() + ",set="
                                + dataverse.getHarvestingSet() + ", metadataPrefix="
                                + dataverse.getHarvestFormatType().getMetadataPrefix() + ", from=" + from
                                + ", until=" + until);

                if (dataverse.isOai()) {
                    harvestedStudyIds = harvestOAI(dataverse, hdLogger, from, until, harvestErrorOccurred,
                            failedIdentifiers);

                } else {
                    harvestedStudyIds = harvestNesstar(dataverse, hdLogger, harvestErrorOccurred,
                            failedIdentifiers);
                }
                harvestingDataverseService.setHarvestSuccess(dataverse.getId(), currentTime,
                        harvestedStudyIds.size(), failedIdentifiers.size());
                hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + dataverse.getServerUrl()
                        + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());

                if (harvestedStudyIds.size() > 0) {
                    harvestingDataverseService.setHarvestSuccessNotEmpty(dataverse.getId(), currentTime,
                            harvestedStudyIds.size(), failedIdentifiers.size());
                    hdLogger.log(Level.INFO, "COMPLETED HARVEST with results");
                }
                // now index all studies (need to modify for update)
                if (this.processedSizeThisBatch > 0) {
                    hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                    if (this.harvestedStudyIdsThisBatch != null) {
                        hdLogger.log(Level.INFO,
                                this.harvestedStudyIdsThisBatch.size() + " studies in the batch");
                    }
                    hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                    indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                    hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
                } else {
                    hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
                }
            } else {
                harvestErrorOccurred.setValue(true);
                harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                        failedIdentifiers.size());

                hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Unknown harvest type.");
            }
        }
        mailService.sendHarvestNotification(vdcNetworkService.find().getSystemEmail(),
                dataverse.getVdc().getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(),
                harvestedStudyIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + dataverse.getServerUrl() + ",format="
                + dataverse.getHarvestFormatType().getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                failedIdentifiers.size());

    } finally {
        harvestingDataverseService.setHarvestingNow(dataverse.getId(), false);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
    }
}

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

/**
 * Run a harvest for an individual harvesting Dataverse
 * @param dataverseRequest
 * @param harvestingClientId
 * @throws IOException
 */
public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId) throws IOException {
    HarvestingClient harvestingClientConfig = harvestingClientService.find(harvestingClientId);

    if (harvestingClientConfig == null) {
        throw new IOException("No such harvesting client: id=" + harvestingClientId);
    }

    Dataverse harvestingDataverse = harvestingClientConfig.getDataverse();

    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean."
            + harvestingDataverse.getAlias() + logTimestamp);
    String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_"
            + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.setUseParentHandlers(false);
    hdLogger.addHandler(fileHandler);

    PrintWriter importCleanupLog = new PrintWriter(new FileWriter(
            "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));

    List<Long> harvestedDatasetIds = null;

    List<Long> harvestedDatasetIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    List<String> deletedIdentifiers = new ArrayList<String>();

    Date harvestStartTime = new Date();

    try {
        boolean harvestingNow = harvestingClientConfig.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName()
                    + " is currently being harvested.");

        } else {
            harvestingClientService.resetHarvestInProgress(harvestingClientId);
            harvestingClientService.setHarvestInProgress(harvestingClientId, harvestStartTime);

            if (harvestingClientConfig.isOai()) {
                harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger,
                        importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers,
                        harvestedDatasetIdsThisBatch);

            } else {
                throw new IOException("Unsupported harvest type");
            }
            harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(),
                    harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size());
            hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl()
                    + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix());
            hdLogger.log(Level.INFO,
                    "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: "
                            + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size());

            // now index all the datasets we have harvested - created, modified or deleted:
            /* (TODO: may not be needed at all. In Dataverse4, we may be able to get away with the normal 
            reindexing after every import. See the rest of the comments about batch indexing throughout 
            this service bean)
            if (this.processedSizeThisBatch > 0) {
                hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                if (this.harvestedDatasetIdsThisBatch != null) {
                    hdLogger.log(Level.INFO, this.harvestedDatasetIdsThisBatch.size()+" studies in the batch");
                }
                hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
            } else {
                hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
            }
             */
        }
        //mailService.sendHarvestNotification(...getSystemEmail(), harvestingDataverse.getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(), harvestedDatasetIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl()
                + ",format=" + harvestingClientConfig.getMetadataPrefix() + " " + e.getClass().getName() + " "
                + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // TODO: 
        // even though this harvesting run failed, we may have had successfully 
        // processed some number of datasets, by the time the exception was thrown. 
        // We should record that number too. And the number of the datasets that
        // had failed, that we may have counted.  -- L.A. 4.4
        harvestingClientService.setHarvestFailure(harvestingClientId, new Date());

    } finally {
        harvestingClientService.resetHarvestInProgress(harvestingClientId);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
        importCleanupLog.close();
    }
}

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandlerImpl.java

/**
 * Log an account fee transfer
 */
@Override
public void logAccountFeePayment(final Transfer transfer) {
    final Logger logger = getAccountFeeLogger();
    final Level level = AccountFeeLevel.DETAILED.getLevel();
    if (logger.isLoggable(level)) {
        final AccountFeeLog feeLog = transfer.getAccountFeeLog();
        final AccountFee fee = feeLog.getAccountFee();
        final UnitsConverter unitsConverter = settingsService.getLocalSettings()
                .getUnitsConverter(transfer.getFrom().getType().getCurrency().getPattern());
        String message;
        Object[] params;
        if (fee.getPaymentDirection() == PaymentDirection.TO_SYSTEM) {
            message = "Charged %s from %s";
            params = new Object[] { unitsConverter.toString(transfer.getAmount()),
                    transfer.getFrom().getOwnerName() };
        } else {
            message = "Paid %s to %s";
            params = new Object[] { unitsConverter.toString(transfer.getAmount()),
                    transfer.getTo().getOwnerName() };
        }
        try {
            logger.log(level, String.format(message, params));
        } catch (final Exception e) {
            System.out
                    .println("Error generating log on " + settingsService.getLogSettings().getAccountFeeFile());
        }
    }
}