List of usage examples for java.util.logging.FileHandler.close()
@Override public synchronized void close() throws SecurityException
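Calling close() flushes any buffered records, writes the formatter's tail (for XMLFormatter, the closing </log> element), and releases the log file together with its .lck lock file. Handler does not implement AutoCloseable, so the examples below close handlers explicitly rather than with try-with-resources. A minimal sketch of the usual pattern (the file and logger names here are illustrative, not taken from the examples below):

import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class CloseExample {
    public static void main(String[] args) throws Exception {
        Logger logger = Logger.getLogger("CloseExample");      // illustrative name
        FileHandler fh = new FileHandler("close_example.log"); // illustrative file
        try {
            fh.setFormatter(new SimpleFormatter());
            logger.addHandler(fh);
            logger.log(Level.INFO, "work in progress");
        } finally {
            logger.removeHandler(fh); // detach so the logger stops using the handler
            fh.close();               // flush records, write the tail, release the file and its .lck lock
        }
    }
}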
From source file:Main.java
public static void main(String args[]) throws Exception {
    LogManager lm = LogManager.getLogManager();
    Logger logger;
    FileHandler fh = new FileHandler("log_test.txt");
    logger = Logger.getLogger("LoggingExample1");
    lm.addLogger(logger);
    logger.setLevel(Level.INFO);
    fh.setFormatter(new XMLFormatter());
    logger.addHandler(fh);
    //logger.setUseParentHandlers(false);
    logger.log(Level.INFO, "test 1");
    logger.log(Level.INFO, "test 2");
    logger.log(Level.INFO, "test 3");
    fh.close();
}
From source file:LoggingExample1.java
public static void main(String args[]) {
    try {
        LogManager lm = LogManager.getLogManager();
        Logger logger;
        FileHandler fh = new FileHandler("log_test.txt");
        logger = Logger.getLogger("LoggingExample1");
        lm.addLogger(logger);
        logger.setLevel(Level.INFO);
        fh.setFormatter(new XMLFormatter());
        logger.addHandler(fh);
        //logger.setUseParentHandlers(false);
        logger.log(Level.INFO, "test 1");
        logger.log(Level.INFO, "test 2");
        logger.log(Level.INFO, "test 3");
        fh.close();
    } catch (Exception e) {
        System.out.println("Exception thrown: " + e);
        e.printStackTrace();
    }
}
From source file:HTMLFormatter.java
public static void main(String args[]) throws Exception {
    LogManager lm = LogManager.getLogManager();
    Logger parentLogger, childLogger;
    FileHandler xml_handler = new FileHandler("log_output.xml");
    FileHandler html_handler = new FileHandler("log_output.html");
    parentLogger = Logger.getLogger("ParentLogger");
    childLogger = Logger.getLogger("ParentLogger.ChildLogger");
    lm.addLogger(parentLogger);
    lm.addLogger(childLogger);
    parentLogger.setLevel(Level.WARNING);
    childLogger.setLevel(Level.ALL);
    xml_handler.setFormatter(new XMLFormatter());
    html_handler.setFormatter(new HTMLFormatter());
    parentLogger.addHandler(xml_handler);
    childLogger.addHandler(html_handler);
    childLogger.log(Level.FINE, "This is a fine log message");
    childLogger.log(Level.SEVERE, "This is a severe log message");
    xml_handler.close();
    html_handler.close();
}
From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java
/**
 * Run a harvest for an individual harvesting Dataverse
 * @param dataverseRequest
 * @param harvestingClientId
 * @throws IOException
 */
public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId) throws IOException {
    HarvestingClient harvestingClientConfig = harvestingClientService.find(harvestingClientId);

    if (harvestingClientConfig == null) {
        throw new IOException("No such harvesting client: id=" + harvestingClientId);
    }

    Dataverse harvestingDataverse = harvestingClientConfig.getDataverse();

    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean."
            + harvestingDataverse.getAlias() + logTimestamp);
    String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_"
            + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.setUseParentHandlers(false);
    hdLogger.addHandler(fileHandler);

    PrintWriter importCleanupLog = new PrintWriter(new FileWriter(
            "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));

    List<Long> harvestedDatasetIds = null;
    List<Long> harvestedDatasetIdsThisBatch = new ArrayList<Long>();
    List<String> failedIdentifiers = new ArrayList<String>();
    List<String> deletedIdentifiers = new ArrayList<String>();

    Date harvestStartTime = new Date();

    try {
        boolean harvestingNow = harvestingClientConfig.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName()
                    + " is currently being harvested.");
        } else {
            harvestingClientService.resetHarvestInProgress(harvestingClientId);
            harvestingClientService.setHarvestInProgress(harvestingClientId, harvestStartTime);

            if (harvestingClientConfig.isOai()) {
                harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger,
                        importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers,
                        harvestedDatasetIdsThisBatch);
            } else {
                throw new IOException("Unsupported harvest type");
            }
            harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(),
                    harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size());
            hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl()
                    + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix());
            hdLogger.log(Level.INFO, "Datasets created/updated: " + harvestedDatasetIds.size()
                    + ", datasets deleted: " + deletedIdentifiers.size() + ", datasets failed: "
                    + failedIdentifiers.size());

            // now index all the datasets we have harvested - created, modified or deleted:
            /* (TODO: may not be needed at all. In Dataverse4, we may be able to get away
               with the normal reindexing after every import. See the rest of the comments
               about batch indexing throughout this service bean)
            if (this.processedSizeThisBatch > 0) {
                hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                if (this.harvestedDatasetIdsThisBatch != null) {
                    hdLogger.log(Level.INFO, this.harvestedDatasetIdsThisBatch.size() + " studies in the batch");
                }
                hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
            } else {
                hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
            }
            */
        }
        //mailService.sendHarvestNotification(...getSystemEmail(), harvestingDataverse.getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(), harvestedDatasetIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl()
                + ",format=" + harvestingClientConfig.getMetadataPrefix() + " " + e.getClass().getName()
                + " " + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // TODO:
        // even though this harvesting run failed, we may have had successfully
        // processed some number of datasets, by the time the exception was thrown.
        // We should record that number too. And the number of the datasets that
        // had failed, that we may have counted. -- L.A. 4.4
        harvestingClientService.setHarvestFailure(harvestingClientId, new Date());
    } finally {
        harvestingClientService.resetHarvestInProgress(harvestingClientId);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
        importCleanupLog.close();
    }
}
From source file:edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean.java
/**
 * Harvest an individual Dataverse
 * @param dataverseId
 */
public void doHarvesting(Long dataverseId) throws IOException {
    HarvestingDataverse dataverse = em.find(HarvestingDataverse.class, dataverseId);
    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);

    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dvn.core.harvest.HarvesterServiceBean."
            + dataverse.getVdc().getAlias() + logTimestamp);
    String logFileName = FileUtil.getImportFileDir() + File.separator + "harvest_"
            + dataverse.getVdc().getAlias() + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.addHandler(fileHandler);
    List<Long> harvestedStudyIds = null;

    this.processedSizeThisBatch = 0;
    this.harvestedStudyIdsThisBatch = new ArrayList<Long>();

    List<String> failedIdentifiers = new ArrayList<String>();
    try {
        boolean harvestingNow = dataverse.isHarvestingNow();

        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + dataverse.getVdc().getName()
                    + " is currently being harvested.");
        } else {
            harvestingDataverseService.resetHarvestingStatus(dataverse.getId());
            String until = null; // If we don't set the until date, we will get all the changes since the last harvest.
            String from = null;
            Date lastSuccessfulHarvestTime = dataverse.getLastSuccessfulHarvestTime();
            if (lastSuccessfulHarvestTime != null) {
                from = formatter.format(lastSuccessfulHarvestTime);
            }
            if (dataverse.isOai() || dataverse.isNesstar()) {
                harvestingDataverseService.setHarvestingNow(dataverse.getId(), true);
                Date currentTime = new Date();
                harvestingDataverseService.setLastHarvestTime(dataverse.getId(), currentTime);

                hdLogger.log(Level.INFO, "BEGIN HARVEST..., oaiUrl=" + dataverse.getServerUrl() + ",set="
                        + dataverse.getHarvestingSet() + ", metadataPrefix="
                        + dataverse.getHarvestFormatType().getMetadataPrefix() + ", from=" + from
                        + ", until=" + until);

                if (dataverse.isOai()) {
                    harvestedStudyIds = harvestOAI(dataverse, hdLogger, from, until, harvestErrorOccurred,
                            failedIdentifiers);
                } else {
                    harvestedStudyIds = harvestNesstar(dataverse, hdLogger, harvestErrorOccurred,
                            failedIdentifiers);
                }
                harvestingDataverseService.setHarvestSuccess(dataverse.getId(), currentTime,
                        harvestedStudyIds.size(), failedIdentifiers.size());
                hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + dataverse.getServerUrl()
                        + ", metadataPrefix=" + dataverse.getHarvestFormatType().getMetadataPrefix());

                if (harvestedStudyIds.size() > 0) {
                    harvestingDataverseService.setHarvestSuccessNotEmpty(dataverse.getId(), currentTime,
                            harvestedStudyIds.size(), failedIdentifiers.size());
                    hdLogger.log(Level.INFO, "COMPLETED HARVEST with results");
                }

                // now index all studies (need to modify for update)
                if (this.processedSizeThisBatch > 0) {
                    hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                    if (this.harvestedStudyIdsThisBatch != null) {
                        hdLogger.log(Level.INFO, this.harvestedStudyIdsThisBatch.size() + " studies in the batch");
                    }
                    hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                    indexService.updateIndexList(this.harvestedStudyIdsThisBatch);
                    hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
                } else {
                    hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
                }
            } else {
                harvestErrorOccurred.setValue(true);
                harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                        failedIdentifiers.size());
                hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Unknown harvest type.");
            }
        }
        mailService.sendHarvestNotification(vdcNetworkService.find().getSystemEmail(),
                dataverse.getVdc().getName(), logFileName, logTimestamp,
                harvestErrorOccurred.booleanValue(), harvestedStudyIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + dataverse.getServerUrl() + ",format="
                + dataverse.getHarvestFormatType().getMetadataPrefix() + " " + e.getClass().getName()
                + " " + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        harvestingDataverseService.setHarvestFailure(dataverse.getId(), harvestedStudyIds.size(),
                failedIdentifiers.size());
    } finally {
        harvestingDataverseService.setHarvestingNow(dataverse.getId(), false);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
    }
}
From source file:edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage.java
public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;
    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();
        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {
            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger.info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            for (int i = 0; i < batchDir.listFiles().length; i++) {
                File studyDir = batchDir.listFiles()[i];
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (int j = 0; j < studyDir.listFiles().length; j++) {
                        File file = studyDir.listFiles()[j];
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {
                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }
                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }
                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }
                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non-directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);
        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        // importBatchDir = "";
    }
    return null;
}
From source file:edu.harvard.iq.dvn.core.web.admin.OptionsPage.java
public String importBatch_action() {
    FileHandler logFileHandler = null;
    Logger importLogger = null;
    if (importBatchDir == null || importBatchDir.equals(""))
        return null;
    try {
        int importFailureCount = 0;
        int fileFailureCount = 0;
        List<Long> studiesToIndex = new ArrayList<Long>();
        //sessionId = ((HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false)).getId();
        sessionId = "batchimportsession";
        File batchDir = new File(importBatchDir);
        if (batchDir.exists() && batchDir.isDirectory()) {
            // create Logger
            String logTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            String dvAlias = vdcService.find(importDVId).getAlias();
            importLogger = Logger.getLogger(
                    "edu.harvard.iq.dvn.core.web.networkAdmin.UtilitiesPage." + dvAlias + "_" + logTimestamp);
            String logFileName = FileUtil.getImportFileDir() + File.separator + "batch_" + dvAlias + "_"
                    + logTimestamp + ".log";
            logFileHandler = new FileHandler(logFileName);
            importLogger.addHandler(logFileHandler);

            importLogger.info("BEGIN BATCH IMPORT (dvId = " + importDVId + ") from directory: " + importBatchDir);

            for (int i = 0; i < batchDir.listFiles().length; i++) {
                File studyDir = batchDir.listFiles()[i];
                if (studyDir.isDirectory()) { // one directory per study
                    importLogger.info("Found study directory: " + studyDir.getName());

                    File xmlFile = null;
                    Map<File, String> filesToUpload = new HashMap();

                    for (int j = 0; j < studyDir.listFiles().length; j++) {
                        File file = studyDir.listFiles()[j];
                        if ("study.xml".equals(file.getName())) {
                            xmlFile = file;
                        } else {
                            addFile(file, "", filesToUpload, importLogger);
                        }
                    }

                    if (xmlFile != null) {
                        try {
                            importLogger.info("Found study.xml and " + filesToUpload.size() + " other "
                                    + (filesToUpload.size() == 1 ? "file." : "files."));
                            // TODO: we need to incorporate the add files step into the same transaction of the import!!!
                            Study study = studyService.importStudy(xmlFile, importFileFormat, importDVId,
                                    getVDCSessionBean().getLoginBean().getUser().getId());
                            study.getLatestVersion().setVersionNote("Study imported via batch import.");
                            importLogger.info("Import of study.xml succeeded: study id = " + study.getId());
                            studiesToIndex.add(study.getId());

                            if (!filesToUpload.isEmpty()) {
                                List<StudyFileEditBean> fileBeans = new ArrayList();
                                for (File file : filesToUpload.keySet()) {
                                    StudyFileEditBean fileBean = new StudyFileEditBean(file,
                                            studyService.generateFileSystemNameSequence(), study);
                                    fileBean.getFileMetadata().setCategory(filesToUpload.get(file));
                                    fileBeans.add(fileBean);
                                }
                                try {
                                    studyFileService.addFiles(study.getLatestVersion(), fileBeans,
                                            getVDCSessionBean().getLoginBean().getUser());
                                    importLogger.info("File upload succeeded.");
                                } catch (Exception e) {
                                    fileFailureCount++;
                                    importLogger.severe("File Upload failed (dir = " + studyDir.getName()
                                            + "): exception message = " + e.getMessage());
                                    logException(e, importLogger);
                                }
                            }
                        } catch (Exception e) {
                            importFailureCount++;
                            importLogger.severe("Import failed (dir = " + studyDir.getName()
                                    + "): exception message = " + e.getMessage());
                            logException(e, importLogger);
                        }
                    } else { // no study.xml found in studyDir
                        importLogger.warning("No study.xml file was found in study directory. Skipping... ");
                    }
                } else {
                    importLogger.warning("Found non-directory at top level. Skipping... (filename = "
                            + studyDir.getName() + ")");
                }
            }

            // generate status message
            String statusMessage = studiesToIndex.size() + (studiesToIndex.size() == 1 ? " study" : " studies")
                    + " successfully imported";
            statusMessage += (fileFailureCount == 0 ? ""
                    : " (" + fileFailureCount + " of which failed file upload)");
            statusMessage += (importFailureCount == 0 ? "."
                    : "; " + importFailureCount + (importFailureCount == 1 ? " study" : " studies")
                            + " failed import.");

            importLogger.info("COMPLETED BATCH IMPORT: " + statusMessage);

            // now index all studies
            importLogger.info("POST BATCH IMPORT, start calls to index.");
            indexService.updateIndexList(studiesToIndex);
            importLogger.info("POST BATCH IMPORT, calls to index finished.");

            addMessage("importMessage", "Batch Import request completed.");
            addMessage("importMessage", statusMessage);
            addMessage("importMessage", "For more detail see log file at: " + logFileName);
        } else {
            addMessage("importMessage",
                    "Batch Import failed: " + importBatchDir + " does not exist or is not a directory.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        addMessage("importMessage", "Batch Import failed: An unexpected error occurred during processing.");
        addMessage("importMessage", "Exception message: " + e.getMessage());
    } finally {
        if (logFileHandler != null) {
            logFileHandler.close();
            importLogger.removeHandler(logFileHandler);
        }
        // importBatchDir = "";
    }
    return null;
}
From source file:org.geotools.utils.imagemosaic.MosaicIndexBuilder.java
/**
 * Main thread for the mosaic index builder.
 */
public void run() {
    // /////////////////////////////////////////////////////////////////////
    //
    // CREATING INDEX FILE
    //
    // /////////////////////////////////////////////////////////////////////

    // /////////////////////////////////////////////////////////////////////
    //
    // Create a file handler that writes log records to a file called
    // error.txt
    //
    // /////////////////////////////////////////////////////////////////////
    FileHandler handler = null;
    try {
        boolean append = true;
        handler = new FileHandler(new StringBuffer(locationPath).append("/error.txt").toString(), append);
        handler.setLevel(Level.SEVERE);
        // Add to the desired logger
        LOGGER.addHandler(handler);

        // /////////////////////////////////////////////////////////////////////
        //
        // Create a set of file names that have to be skipped since these are
        // our metadata files
        //
        // /////////////////////////////////////////////////////////////////////
        final Set<String> skipFiles = new HashSet<String>(Arrays.asList(new String[] { indexName + ".shp",
                indexName + ".dbf", indexName + ".shx", indexName + ".prj", "error.txt", "error.txt.lck",
                indexName + ".properties" }));

        // /////////////////////////////////////////////////////////////////////
        //
        // Creating temp vars
        //
        // /////////////////////////////////////////////////////////////////////
        ShapefileDataStore index = null;
        Transaction t = new DefaultTransaction();
        // declaring a precision model to adhere to the Java double type precision
        PrecisionModel precMod = new PrecisionModel(PrecisionModel.FLOATING);
        GeometryFactory geomFactory = new GeometryFactory(precMod);
        try {
            index = new ShapefileDataStore(
                    new File(locationPath + File.separator + indexName + ".shp").toURI().toURL());
        } catch (MalformedURLException ex) {
            if (LOGGER.isLoggable(Level.SEVERE))
                LOGGER.log(Level.SEVERE, ex.getLocalizedMessage(), ex);
            fireException(ex);
            return;
        }

        final List<File> files = new ArrayList<File>();
        recurse(files, locationPath);

        // /////////////////////////////////////////////////////////////////////
        //
        // Cycling over the files that have been filtered out
        //
        // /////////////////////////////////////////////////////////////////////
        numFiles = files.size();
        String validFileName = null;
        final Iterator<File> filesIt = files.iterator();
        FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
        boolean doneSomething = false;
        for (int i = 0; i < numFiles; i++) {
            StringBuffer message;
            //
            // Check that this file is actually good to go
            //
            final File fileBeingProcessed = ((File) filesIt.next());
            if (!fileBeingProcessed.exists() || !fileBeingProcessed.canRead()
                    || !fileBeingProcessed.isFile()) {
                // send a message
                message = new StringBuffer("Skipped file ").append(files.get(i))
                        .append(" since it seems invalid.");
                if (LOGGER.isLoggable(Level.INFO))
                    LOGGER.info(message.toString());
                fireEvent(message.toString(), ((i * 99.0) / numFiles));
                continue;
            }
            //
            // Has anyone asked us to stop?
            //
            if (getStopThread()) {
                message = new StringBuffer("Stopping requested at file ").append(i).append(" of ")
                        .append(numFiles).append(" files");
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.fine(message.toString());
                }
                fireEvent(message.toString(), ((i * 100.0) / numFiles));
                return;
            }
            // replacing chars on input path
            try {
                validFileName = fileBeingProcessed.getCanonicalPath();
            } catch (IOException e1) {
                fireException(e1);
                return;
            }
            validFileName = validFileName.replace('\\', '/');
            validFileName = validFileName.substring(locationPath.length() + 1,
                    fileBeingProcessed.getAbsolutePath().length());
            if (skipFiles.contains(validFileName))
                continue;
            message = new StringBuffer("Now indexing file ").append(validFileName);
            if (LOGGER.isLoggable(Level.FINE)) {
                LOGGER.fine(message.toString());
            }
            fireEvent(message.toString(), ((i * 100.0) / numFiles));
            try {
                // ////////////////////////////////////////////////////////
                //
                // STEP 1
                // Getting an ImageIO reader for this coverage.
                //
                // ////////////////////////////////////////////////////////
                ImageInputStream inStream = ImageIO.createImageInputStream(fileBeingProcessed);
                if (inStream == null) {
                    if (LOGGER.isLoggable(Level.SEVERE))
                        LOGGER.severe(fileBeingProcessed
                                + " has been skipped since we could not get a stream for it");
                    continue;
                }
                inStream.mark();
                final Iterator<ImageReader> it = ImageIO.getImageReaders(inStream);
                ImageReader r = null;
                if (it.hasNext()) {
                    r = (ImageReader) it.next();
                    r.setInput(inStream);
                } else {
                    // release resources
                    try {
                        inStream.close();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    // try {
                    //     r.dispose();
                    // } catch (Exception e) {
                    //     // ignore exception
                    // }
                    // send a message
                    message = new StringBuffer("Skipped file ").append(files.get(i))
                            .append(": No ImageIO readers available.");
                    if (LOGGER.isLoggable(Level.INFO))
                        LOGGER.info(message.toString());
                    fireEvent(message.toString(), ((i * 99.0) / numFiles));
                    continue;
                }

                // ////////////////////////////////////////////////////////
                //
                // STEP 2
                // Getting a coverage reader for this coverage.
                //
                // ////////////////////////////////////////////////////////
                if (LOGGER.isLoggable(Level.FINE))
                    LOGGER.fine(new StringBuffer("Getting a reader").toString());
                final AbstractGridFormat format = (AbstractGridFormat) GridFormatFinder
                        .findFormat(files.get(i));
                if (format == null || !format.accepts(files.get(i))) {
                    // release resources
                    try {
                        inStream.close();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    try {
                        r.dispose();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    message = new StringBuffer("Skipped file ").append(files.get(i))
                            .append(": File format is not supported.");
                    if (LOGGER.isLoggable(Level.INFO))
                        LOGGER.info(message.toString());
                    fireEvent(message.toString(), ((i * 99.0) / numFiles));
                    continue;
                }
                final AbstractGridCoverage2DReader reader = (AbstractGridCoverage2DReader) format
                        .getReader(files.get(i));
                envelope = (GeneralEnvelope) reader.getOriginalEnvelope();
                actualCRS = reader.getCrs();

                // /////////////////////////////////////////////////////////////////////
                //
                // STEP 3
                // Get the type specifier for this image and check that the
                // image has the correct sample model and color model.
                // If this is the first cycle of the loop we initialize everything.
                //
                // /////////////////////////////////////////////////////////////////////
                final ImageTypeSpecifier its = ((ImageTypeSpecifier) r.getImageTypes(0).next());
                boolean skipFeature = false;
                if (globEnvelope == null) {
                    // /////////////////////////////////////////////////////////////////////
                    //
                    // at the first step we initialize everything that we will
                    // reuse afterwards, starting with color models, sample
                    // models, crs, etc....
                    //
                    // /////////////////////////////////////////////////////////////////////
                    defaultCM = its.getColorModel();
                    if (defaultCM instanceof IndexColorModel) {
                        IndexColorModel icm = (IndexColorModel) defaultCM;
                        int numBands = defaultCM.getNumColorComponents();
                        defaultPalette = new byte[3][icm.getMapSize()];
                        icm.getReds(defaultPalette[0]);
                        icm.getGreens(defaultPalette[0]);
                        icm.getBlues(defaultPalette[0]);
                        if (numBands == 4)
                            icm.getAlphas(defaultPalette[0]);
                    }
                    defaultSM = its.getSampleModel();
                    defaultCRS = actualCRS;
                    globEnvelope = new GeneralEnvelope(envelope);

                    // /////////////////////////////////////////////////////////////////////
                    //
                    // getting information about resolution
                    //
                    // /////////////////////////////////////////////////////////////////////
                    // get the dimension of the hr image and build the model
                    // as well as computing the resolution
                    //
                    // resetting reader and recreating stream, workaround for a
                    // strange imageio bug
                    r.reset();
                    try {
                        inStream.reset();
                    } catch (IOException e) {
                        inStream = ImageIO.createImageInputStream(fileBeingProcessed);
                    }
                    // let's check if we got something now
                    if (inStream == null) {
                        // skip file
                        if (LOGGER.isLoggable(Level.WARNING))
                            LOGGER.warning("Skipping file " + fileBeingProcessed.toString());
                        continue;
                    }
                    r.setInput(inStream);
                    numberOfLevels = r.getNumImages(true);
                    resolutionLevels = new double[2][numberOfLevels];
                    double[] res = getResolution(envelope,
                            new Rectangle(r.getWidth(0), r.getHeight(0)), defaultCRS);
                    resolutionLevels[0][0] = res[0];
                    resolutionLevels[1][0] = res[1];

                    // resolution levels
                    if (numberOfLevels > 1) {
                        for (int k = 0; k < numberOfLevels; k++) {
                            res = getResolution(envelope,
                                    new Rectangle(r.getWidth(k), r.getHeight(k)), defaultCRS);
                            resolutionLevels[0][k] = res[0];
                            resolutionLevels[1][k] = res[1];
                        }
                    }

                    // /////////////////////////////////////////////////////////////////////
                    //
                    // creating the schema
                    //
                    // /////////////////////////////////////////////////////////////////////
                    final SimpleFeatureTypeBuilder featureBuilder = new SimpleFeatureTypeBuilder();
                    featureBuilder.setName("Flag");
                    featureBuilder.setNamespaceURI("http://www.geo-solutions.it/");
                    featureBuilder.add("location", String.class);
                    featureBuilder.add("the_geom", Polygon.class, this.actualCRS);
                    featureBuilder.setDefaultGeometry("the_geom");
                    final SimpleFeatureType simpleFeatureType = featureBuilder.buildFeatureType();
                    // create the schema for the new shape file
                    index.createSchema(simpleFeatureType);
                    // get a feature writer
                    fw = index.getFeatureWriter(t);
                } else {
                    // ////////////////////////////////////////////////////////
                    //
                    // comparing ColorModel
                    // comparing SampleModel
                    // comparing CRSs
                    //
                    // ////////////////////////////////////////////////////////
                    globEnvelope.add(envelope);
                    actualCM = its.getColorModel();
                    actualSM = its.getSampleModel();
                    skipFeature = (i > 0 ? !(CRS.equalsIgnoreMetadata(defaultCRS, actualCRS)) : false);
                    if (skipFeature)
                        LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                                .append(" because CRSs do not match.").toString());
                    skipFeature = checkColorModels(defaultCM, defaultPalette, actualCM);
                    if (skipFeature)
                        LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                                .append(" because color models do not match.").toString());
                    // defaultCM.getNumComponents()==actualCM.getNumComponents()&&
                    // defaultCM.getClass().equals(actualCM.getClass())
                    // && defaultSM.getNumBands() == actualSM.getNumBands()
                    // && defaultSM.getDataType() == actualSM.getDataType() &&
                    //
                    // if (skipFeature)
                    //     LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                    //             .append(" because cm or sm does not match.").toString());
                    // res = getResolution(envelope, new Rectangle(r.getWidth(0), r.getHeight(0)), defaultCRS);
                    // if (Math.abs((resX - res[0]) / resX) > EPS || Math.abs(resY - res[1]) > EPS) {
                    //     LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                    //             .append(" because resolutions do not match.").toString());
                    //     skipFeature = true;
                    // }
                }

                // ////////////////////////////////////////////////////////
                //
                // STEP 4
                //
                // create and store features
                //
                // ////////////////////////////////////////////////////////
                if (!skipFeature) {
                    final SimpleFeature feature = fw.next();
                    feature.setAttribute(1,
                            geomFactory.toGeometry(new ReferencedEnvelope((Envelope) envelope)));
                    feature.setAttribute(0,
                            absolute
                                    ? new StringBuilder(this.locationPath).append(File.separatorChar)
                                            .append(validFileName).toString()
                                    : validFileName);
                    fw.write();

                    message = new StringBuffer("Done with file ").append(files.get(i));
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine(message.toString());
                    }
                    message.append('\n');
                    fireEvent(message.toString(), (((i + 1) * 99.0) / numFiles));
                    doneSomething = true;
                } else
                    skipFeature = false;

                // ////////////////////////////////////////////////////////
                //
                // STEP 5
                //
                // release resources
                //
                // ////////////////////////////////////////////////////////
                try {
                    inStream.close();
                } catch (Exception e) {
                    // ignore exception
                }
                try {
                    r.dispose();
                } catch (Exception e) {
                    // ignore exception
                }
                // release resources
                reader.dispose();
            } catch (IOException e) {
                fireException(e);
                break;
            } catch (ArrayIndexOutOfBoundsException e) {
                fireException(e);
                break;
            }
        }

        try {
            if (fw != null)
                fw.close();
            t.commit();
            t.close();
            index.dispose();
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
        }
        createPropertiesFiles(globEnvelope, doneSomething);
    } catch (SecurityException el) {
        fireException(el);
        return;
    } catch (IOException el) {
        fireException(el);
        return;
    } finally {
        try {
            if (handler != null)
                handler.close();
        } catch (Throwable e) {
            // ignore
        }
    }
}
From source file:util.Log.java
/**
 * Generates a log file inside the Logger folder, located in the user's
 * home folder.
 *
 * @param className
 * @param ex
 */
public static void relatarExcecao(String className, Exception ex) {
    try {
        /*
         * We give the Logger a name, which in this case is the name of
         * the class where the exception occurred
         */
        Logger log = Logger.getLogger(className);
        /*
         * Variable holding the system folder that leads to the user's
         * home; by default it is the one for the Windows operating system
         */
        String systemPath = "/Users/";
        /* If it is another operating system */
        if (System.getProperty("os.name").startsWith("Linux")) {
            systemPath = "/home/";
        }
        /* Folder where we will put the logs */
        File pastaLog = new File(systemPath + System.getProperty("user.name") + "/Logger");
        if (!pastaLog.exists()) {
            pastaLog.mkdir();
        }
        String arquivoDir = pastaLog.getAbsolutePath() + "/LOG_"
                + LocalDateTime.now().format(DateTimeFormatter.ofPattern("dd-MM_HH-mm-ss")) + ".log";
        /* Class responsible for writing the file */
        FileHandler escrever = new FileHandler(arquivoDir, true);
        /*
         * We need to specify how (in which format) the exceptions will be
         * written; we use the ready-made SimpleFormatter class for that
         */
        escrever.setFormatter(new SimpleFormatter());
        /*
         * Attach to our log the handler that will write whatever
         * exception is raised
         */
        log.addHandler(escrever);
        /*
         * Generate the log entry at the SEVERE (high) level, passing the
         * exception to it
         */
        log.log(Level.SEVERE, null, ex);
        /* Finish writing */
        escrever.flush();
        escrever.close();
        /* Sends the exception by e-mail */
        Log.relatarExceptionEmail(className, ex.getMessage(), arquivoDir);
    } catch (IOException | SecurityException e) {
        Logger.getLogger(Log.class.getName()).log(Level.SEVERE, null, e);
    }
}