List of usage examples for java.io IOException getLocalizedMessage
public String getLocalizedMessage()
From source file:br.com.nordestefomento.jrimum.bopepo.view.guia.ViewerPDF.java
/** * @throws JRimumException/*from w w w . jav a 2s .c o m*/ * * @return */ protected ByteArrayOutputStream getStream() { ByteArrayOutputStream baos = null; try { processarPdf(); baos = FileUtil.bytes2Stream(outputStream.toByteArray()); } catch (IOException e) { log.error("Erro durante a criao do stream. " + e.getLocalizedMessage(), e); throw new JRimumException( "Erro durante a criao do stream. " + "Causado por " + e.getLocalizedMessage(), e); } catch (DocumentException e) { log.error("Erro durante a criao do stream. " + e.getLocalizedMessage(), e); throw new JRimumException( "Erro durante a criao do stream. " + "Causado por " + e.getLocalizedMessage(), e); } return baos; }
From source file:br.com.nordestefomento.jrimum.bopepo.view.guia.ViewerPDF.java
/**
 * Renders the configured document to PDF and returns its content as a raw byte array.
 *
 * @return the rendered PDF bytes
 * @throws JRimumException if PDF processing or byte extraction fails
 */
protected byte[] getBytes() {
    byte[] bytes = null;
    try {
        processarPdf();
        bytes = outputStream.toByteArray();
    } catch (IOException | DocumentException e) {
        // FIX: both exception types were handled by two byte-identical catch
        // blocks; multi-catch removes the duplication without changing behavior.
        log.error("Erro durante a criao do stream. " + e.getLocalizedMessage(), e);
        throw new JRimumException(
                "Erro durante a criao do stream. " + "Causado por " + e.getLocalizedMessage(), e);
    }
    return bytes;
}
From source file:net.sf.logsaw.index.internal.LuceneIndexServiceImpl.java
/**
 * Translates a list of query restrictions into a Lucene {@link Query}.
 * An empty restriction list yields a match-all query; otherwise each
 * restriction contributes a MUST / MUST_NOT clause to a BooleanQuery via
 * a double-dispatch visitor (restriction type -> field type).
 *
 * NOTE(review): when every clause is negative, Lucene (by design) returns
 * nothing, so the first negative restriction also adds a SHOULD match-all
 * clause as a workaround — see the guarded blocks below.
 */
private Query convertToQuery(final List<ARestriction<?>> restrictions) {
    if (restrictions.isEmpty()) {
        // Unrestricted
        return new MatchAllDocsQuery();
    }
    final BooleanQuery query = new BooleanQuery();
    // Setup visitor; each visit(...) appends clauses to the shared BooleanQuery above.
    IRestrictionVisitor visitor = new IRestrictionVisitor() {

        // Date restrictions become open-ended long ranges (exclusive bounds).
        @Override
        public void visit(final DateRestriction restriction) {
            // NOTE(review): this inner 'visitor' shadows the enclosing local of
            // the same name — confusing but harmless; renaming would be a code change.
            ILogEntryFieldVisitor visitor = new LogEntryFieldVisitorAdapter() {
                @Override
                public void visit(DateLogEntryField fld) {
                    if (restriction.getOperator().equals(Operators.OPERATOR_BEFORE)) {
                        // before: (-inf, value) exclusive
                        query.add(NumericRangeQuery.newLongRange(fld.getKey(), null,
                                fld.toIndexedValue(restriction.getValue()), false, false), Occur.MUST);
                    } else if (restriction.getOperator().equals(Operators.OPERATOR_AFTER)) {
                        // after: (value, +inf) exclusive
                        query.add(NumericRangeQuery.newLongRange(fld.getKey(),
                                fld.toIndexedValue(restriction.getValue()), null, false, false), Occur.MUST);
                    }
                }
            };
            restriction.getField().visit(visitor);
        }

        // Level restrictions become int ranges over the indexed severity value.
        @Override
        public void visit(final LevelRestriction restriction) {
            ILogEntryFieldVisitor visitor = new LogEntryFieldVisitorAdapter() {
                @Override
                public void visit(LevelLogEntryField fld) {
                    if (restriction.getOperator().equals(Operators.OPERATOR_GREATER_THAN)) {
                        // NOTE(review): GREATER_THAN uses restriction.getValue().getValue()
                        // while every other branch uses fld.toIndexedValue(...) — confirm
                        // these produce the same indexed representation; looks inconsistent.
                        query.add(NumericRangeQuery.newIntRange(fld.getKey(),
                                restriction.getValue().getValue(), null, false, false), Occur.MUST);
                    } else if (restriction.getOperator().equals(Operators.OPERATOR_LESS_THAN)) {
                        query.add(NumericRangeQuery.newIntRange(fld.getKey(), null,
                                fld.toIndexedValue(restriction.getValue()), false, false), Occur.MUST);
                    } else if (restriction.getOperator().equals(Operators.OPERATOR_EQUALS)) {
                        // equality expressed as a closed single-point range [v, v]
                        query.add(NumericRangeQuery.newIntRange(fld.getKey(),
                                fld.toIndexedValue(restriction.getValue()),
                                fld.toIndexedValue(restriction.getValue()), true, true), Occur.MUST);
                    } else if (restriction.getOperator().equals(Operators.OPERATOR_NOT_EQUALS)) {
                        query.add(NumericRangeQuery.newIntRange(fld.getKey(),
                                fld.toIndexedValue(restriction.getValue()),
                                fld.toIndexedValue(restriction.getValue()), true, true), Occur.MUST_NOT);
                        if (isAllNegative(restrictions) && restrictions.get(0).equals(restriction)) {
                            // By design Lucene does not process negative-only queries
                            query.add(new MatchAllDocsQuery(), Occur.SHOULD);
                        }
                    }
                }
            };
            restriction.getField().visit(visitor);
        }

        // String restrictions map to phrase / term / prefix queries.
        @Override
        public void visit(final StringRestriction restriction) {
            if (restriction.getOperator().equals(Operators.OPERATOR_CONTAINS)) {
                try {
                    // Setup phrase query with tokenized query string
                    PhraseQuery phrase = new PhraseQuery();
                    fillPhraseQuery(phrase, getAnalyzer(), restriction.getField().getKey(),
                            restriction.getValue());
                    query.add(phrase, Occur.MUST);
                } catch (IOException e) {
                    // NOTE(review): a tokenization failure silently drops this clause
                    // (only logged), widening the result set — confirm this is intended.
                    logger.error(e.getLocalizedMessage(), e);
                }
            } else if (restriction.getOperator().equals(Operators.OPERATOR_NOT_CONTAINS)) {
                try {
                    // Setup phrase query with tokenized query string
                    PhraseQuery phrase = new PhraseQuery();
                    fillPhraseQuery(phrase, getAnalyzer(), restriction.getField().getKey(),
                            restriction.getValue());
                    query.add(phrase, Occur.MUST_NOT);
                    if (isAllNegative(restrictions) && restrictions.get(0).equals(restriction)) {
                        // By design Lucene does not process negative-only queries
                        query.add(new MatchAllDocsQuery(), Occur.SHOULD);
                    }
                } catch (IOException e) {
                    logger.error(e.getLocalizedMessage(), e);
                }
            } else if (restriction.getOperator().equals(Operators.OPERATOR_EQUALS)) {
                query.add(new TermQuery(new Term(restriction.getField().getKey(),
                        restriction.getValue())), Occur.MUST);
            } else if (restriction.getOperator().equals(Operators.OPERATOR_NOT_EQUALS)) {
                query.add(new TermQuery(new Term(restriction.getField().getKey(),
                        restriction.getValue())), Occur.MUST_NOT);
                if (isAllNegative(restrictions) && restrictions.get(0).equals(restriction)) {
                    // By design Lucene does not process negative-only queries
                    query.add(new MatchAllDocsQuery(), Occur.SHOULD);
                }
            } else if (restriction.getOperator().equals(Operators.OPERATOR_BEGINS_WITH)) {
                query.add(new PrefixQuery(new Term(restriction.getField().getKey(),
                        restriction.getValue())), Occur.MUST);
            } else if (restriction.getOperator().equals(Operators.OPERATOR_NOT_BEGINS_WITH)) {
                query.add(new PrefixQuery(new Term(restriction.getField().getKey(),
                        restriction.getValue())), Occur.MUST_NOT);
                if (isAllNegative(restrictions) && restrictions.get(0).equals(restriction)) {
                    // By design Lucene does not process negative-only queries
                    query.add(new MatchAllDocsQuery(), Occur.SHOULD);
                }
            }
        }
    };
    // Dispatch every restriction through the visitor, accumulating clauses.
    for (ARestriction<?> restriction : restrictions) {
        restriction.visit(visitor);
    }
    return query;
}
From source file:edu.wpi.margrave.MCommunicator.java
/** * Places a "success" message on the output buffer. This message * lets the caller (Racket) know that it is safe to begin sending * commands./*from w ww .j a v a 2 s . com*/ * */ public static void sendReadyReply() { Document theResponse = MEnvironment.successResponse(); addBuffers(theResponse); writeToLog("Returning: " + transformXMLToString(theResponse) + "\n"); try { out.write(transformXMLToByteArray(theResponse)); } catch (IOException ex) { Logger.getLogger(MCommunicator.class.getName()).log(Level.SEVERE, null, ex); writeToLog( "\nIOException in handleXMLCommand while parsing command stream: " + ex.getLocalizedMessage()); System.exit(1); } }
From source file:it.geosolutions.geobatch.geotiff.retile.GeotiffRetiler.java
/**
 * Retiles every GeoTIFF referenced by the incoming event queue.
 *
 * For a directory event, every file inside it is retiled in place and the
 * directory event itself is returned; for a plain file event, the retiled
 * output file is returned as a new event. Per-file failures are logged and
 * skipped (best-effort); unrecoverable setup failures raise ActionException.
 *
 * @param events queue of file-system events to process (consumed)
 * @return queue of result events, or the (drained) input queue if nothing succeeded
 * @throws ActionException if configuration is missing, the queue is empty,
 *         or an unexpected error escapes the per-file handlers
 */
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {
    try {
        if (configuration == null) {
            final String message = "GeotiffRetiler::execute(): flow configuration is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        if (events.size() == 0) {
            throw new ActionException(this,
                    "GeotiffRetiler::execute(): Unable to process an empty events queue.");
        }
        if (LOGGER.isInfoEnabled())
            LOGGER.info("GeotiffRetiler::execute(): Starting with processing...");
        listenerForwarder.started();
        // The return
        final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();
        while (events.size() > 0) {
            FileSystemEvent event = events.remove();
            File eventFile = event.getSource();
            FileSystemEventType eventType = event.getEventType();
            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming file event
                 */
                if (eventFile.isDirectory()) {
                    // Directory event: retile each contained file; failures skip to the next file.
                    File[] fileList = eventFile.listFiles();
                    int size = fileList.length;
                    for (int progress = 0; progress < size; progress++) {
                        File inFile = fileList[progress];
                        final String absolutePath = inFile.getAbsolutePath();
                        final String inputFileName = FilenameUtils.getName(absolutePath);
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("is going to retile: " + inputFileName);
                        try {
                            listenerForwarder.setTask("GeotiffRetiler");
                            // Work on a temp file first, then copy it over the output path.
                            File tiledTiffFile = File.createTempFile(inFile.getName(), "_tiled.tif",
                                    getTempDir());
                            if (tiledTiffFile.exists()) {
                                // file already exists
                                // check write permission
                                if (!tiledTiffFile.canWrite()) {
                                    final String message = "Unable to over-write the temporary file called: "
                                            + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                    if (LOGGER.isErrorEnabled()) {
                                        LOGGER.error(message);
                                    }
                                    throw new IllegalArgumentException(message);
                                }
                            } else if (!tiledTiffFile.createNewFile()) {
                                final String message = "Unable to create temporary file called: "
                                        + tiledTiffFile.getAbsolutePath();
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                            final double compressionRatio = getConfiguration().getCompressionRatio();
                            final String compressionType = getConfiguration().getCompressionScheme();
                            reTile(inFile, tiledTiffFile, compressionRatio, compressionType,
                                    getConfiguration().getTileW(), getConfiguration().getTileH(),
                                    getConfiguration().isForceToBigTiff());
                            // Normalize the output extension to a tif variant.
                            String extension = FilenameUtils.getExtension(inputFileName);
                            if (!extension.contains("tif")) {
                                extension = "tif";
                            }
                            // Output replaces the input file in its original directory.
                            final String outputFileName = FilenameUtils.getFullPath(absolutePath)
                                    + FilenameUtils.getBaseName(inputFileName) + "." + extension;
                            final File outputFile = new File(outputFileName);
                            // do we need to remove the input?
                            FileUtils.copyFile(tiledTiffFile, outputFile);
                            FileUtils.deleteQuietly(tiledTiffFile);
                            // set the output
                            /*
                             * COMMENTED OUT 21 Feb 2011 (simone): if the event represents a Dir
                             * we return a Dir, regardless of failing files. (carlo): we may also
                             * want to check if a file is already tiled.
                             */
                        } catch (UnsupportedOperationException uoe) {
                            // Per-file failure: report, log, move on to the next file.
                            listenerForwarder.failed(uoe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                            continue;
                        } catch (IOException ioe) {
                            listenerForwarder.failed(ioe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                            continue;
                        } catch (IllegalArgumentException iae) {
                            listenerForwarder.failed(iae);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(iae.getLocalizedMessage(), iae);
                            continue;
                        } finally {
                            // Progress in percent; the guard avoids division by zero.
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());
                    // add the directory to the return
                    ret.add(event);
                } else {
                    // file is not a directory
                    try {
                        listenerForwarder.setTask("GeotiffRetiler");
                        // NOTE(review): here the temp file lives next to the input
                        // (parent dir), while the directory branch uses getTempDir().
                        File tiledTiffFile = File.createTempFile(eventFile.getName(), "_tiled.tif",
                                eventFile.getParentFile());
                        if (tiledTiffFile.exists()) {
                            // file already exists
                            // check write permission
                            if (!tiledTiffFile.canWrite()) {
                                final String message = "Unable to over-write the temporary file called: "
                                        + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                        } else if (!tiledTiffFile.createNewFile()) {
                            final String message = "Unable to create temporary file called: "
                                    + tiledTiffFile.getAbsolutePath();
                            if (LOGGER.isErrorEnabled()) {
                                LOGGER.error(message);
                            }
                            throw new IllegalArgumentException(message);
                        }
                        final double compressionRatio = getConfiguration().getCompressionRatio();
                        final String compressionType = getConfiguration().getCompressionScheme();
                        reTile(eventFile, tiledTiffFile, compressionRatio, compressionType,
                                getConfiguration().getTileW(), getConfiguration().getTileH(),
                                getConfiguration().isForceToBigTiff());
                        String extension = FilenameUtils.getExtension(eventFile.getName());
                        if (!extension.contains("tif")) {
                            extension = "tif";
                        }
                        final String outputFileName = FilenameUtils.getFullPath(eventFile.getAbsolutePath())
                                + FilenameUtils.getBaseName(eventFile.getName()) + "." + extension;
                        final File outputFile = new File(outputFileName);
                        // do we need to remove the input?
                        FileUtils.copyFile(tiledTiffFile, outputFile);
                        FileUtils.deleteQuietly(tiledTiffFile);
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());
                        listenerForwarder.setProgress(100);
                        ret.add(new FileSystemEvent(outputFile, eventType));
                    } catch (UnsupportedOperationException uoe) {
                        listenerForwarder.failed(uoe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                        continue;
                    } catch (IOException ioe) {
                        listenerForwarder.failed(ioe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                        continue;
                    } catch (IllegalArgumentException iae) {
                        listenerForwarder.failed(iae);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(iae.getLocalizedMessage(), iae);
                        continue;
                    } finally {
                        listenerForwarder.setProgress((100) / ((events.size() != 0) ? events.size() : 1));
                        listenerForwarder.progressing();
                    }
                }
            } else {
                // Unreadable/unwritable/missing input: report failure but keep draining the queue.
                final String message = "The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);
                final IllegalArgumentException iae = new IllegalArgumentException(message);
                listenerForwarder.failed(iae);
            }
        } // endwile
        listenerForwarder.completed();
        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error no file are set to be returned the input queue is
             * returned
             */
            return events;
        }
    } catch (Exception t) {
        // Boundary catch: wrap anything unexpected into an ActionException.
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        final ActionException exc = new ActionException(this, t.getLocalizedMessage(), t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}
From source file:com.photon.phresco.framework.impl.CIManagerImpl.java
public CIJobStatus deleteCI(CIJob job, List<String> builds) throws PhrescoException { S_LOGGER.debug("Entering Method CIManagerImpl.deleteCI(CIJob job)"); S_LOGGER.debug("Job name " + job.getName()); cli = getCLI(job);// w ww . j a v a 2 s . co m String deleteType = null; List<String> argList = new ArrayList<String>(); S_LOGGER.debug("job name " + job.getName()); S_LOGGER.debug("Builds " + builds); if (CollectionUtils.isEmpty(builds)) { // delete job S_LOGGER.debug("Job deletion started"); S_LOGGER.debug("Command " + FrameworkConstants.CI_JOB_DELETE_COMMAND); deleteType = "Job"; argList.add(FrameworkConstants.CI_JOB_DELETE_COMMAND); argList.add(job.getName()); } else { // delete Build S_LOGGER.debug("Build deletion started"); deleteType = "Build"; argList.add(FrameworkConstants.CI_BUILD_DELETE_COMMAND); argList.add(job.getName()); StringBuilder result = new StringBuilder(); for (String string : builds) { result.append(string); result.append(","); } String buildNos = result.substring(0, result.length() - 1); argList.add(buildNos); S_LOGGER.debug("Command " + FrameworkConstants.CI_BUILD_DELETE_COMMAND); S_LOGGER.debug("Build numbers " + buildNos); } try { int status = cli.execute(argList); String message = deleteType + " deletion started in jenkins"; if (status == FrameworkConstants.JOB_STATUS_NOTOK) { deleteType = deleteType.substring(0, 1).toLowerCase() + deleteType.substring(1); message = "Error while deleting " + deleteType + " in jenkins"; } if (debugEnabled) { S_LOGGER.debug("Delete CI Status " + status); S_LOGGER.debug("Delete CI Message " + message); } return new CIJobStatus(status, message); } finally { if (cli != null) { try { cli.close(); } catch (IOException e) { if (debugEnabled) { S_LOGGER.error("Entered into catch block of CIManagerImpl.deleteCI(CIJob job) " + e.getLocalizedMessage()); } } catch (InterruptedException e) { if (debugEnabled) { S_LOGGER.error("Entered into catch block of CIManagerImpl.deleteCI(CIJob job) " + e.getLocalizedMessage()); } } 
} } }
From source file:de.knowwe.defi.usermanager.XMLUserDatabase.java
private void saveDOM() throws WikiSecurityException { if (c_dom == null) { Log.severe("User database doesn't exist in memory."); }/* www . ja va 2 s . c o m*/ File newFile = new File(c_file.getAbsolutePath() + ".new"); try { BufferedWriter io = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(newFile), "UTF-8")); // Write the file header and document root io.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); io.write("<users>\n"); // Write each profile as a <user> node Element root = c_dom.getDocumentElement(); NodeList nodes = root.getElementsByTagName(USER_TAG); for (int i = 0; i < nodes.getLength(); i++) { Element user = (Element) nodes.item(i); io.write(" <" + USER_TAG + " "); io.write(UID); io.write("=\"" + user.getAttribute(UID) + "\" "); io.write(LOGIN_NAME); io.write("=\"" + user.getAttribute(LOGIN_NAME) + "\" "); io.write(WIKI_NAME); io.write("=\"" + user.getAttribute(WIKI_NAME) + "\" "); io.write(FULL_NAME); io.write("=\"" + user.getAttribute(FULL_NAME) + "\" "); io.write(EMAIL); io.write("=\"" + user.getAttribute(EMAIL) + "\" "); io.write(PASSWORD); io.write("=\"" + user.getAttribute(PASSWORD) + "\" "); io.write(CREATED); io.write("=\"" + user.getAttribute(CREATED) + "\" "); io.write(LAST_MODIFIED); io.write("=\"" + user.getAttribute(LAST_MODIFIED) + "\" "); io.write(LOCK_EXPIRY); io.write("=\"" + user.getAttribute(LOCK_EXPIRY) + "\" "); io.write(">"); NodeList attributes = user.getElementsByTagName(ATTRIBUTES_TAG); for (int j = 0; j < attributes.getLength(); j++) { Element attribute = (Element) attributes.item(j); String value = extractText(attribute); io.write("\n <" + ATTRIBUTES_TAG + ">"); io.write(value); io.write("</" + ATTRIBUTES_TAG + ">"); } io.write("\n </" + USER_TAG + ">\n"); } io.write("</users>"); io.close(); } catch (IOException e) { throw new WikiSecurityException(e.getLocalizedMessage(), e); } // Copy new file over old version File backup = new File(c_file.getAbsolutePath() + ".old"); if (backup.exists()) { if 
(!backup.delete()) { Log.severe("Could not delete old user database backup: " + backup); } } if (!c_file.renameTo(backup)) { Log.severe("Could not create user database backup: " + backup); } if (!newFile.renameTo(c_file)) { Log.severe("Could not save database: " + backup + " restoring backup."); if (!backup.renameTo(c_file)) { Log.severe("Restore failed. Check the file permissions."); } Log.severe("Could not save database: " + c_file + ". Check the file permissions"); } }
From source file:cross.datastructures.workflow.DefaultWorkflow.java
@Override public void save() { try {/* w w w . j a va 2 s. c o m*/ final String wflname = getName(); log.info("Saving workflow {}", wflname); final Document doc = new Document(); final ProcessingInstruction pi = new ProcessingInstruction("xml-stylesheet", "type=\"text/xsl\" href=\"http://maltcms.sourceforge.net/res/maltcmsHTMLResult.xsl\""); doc.addContent(pi); doc.addContent(writeXML()); final XMLOutputter outp = new XMLOutputter(Format.getPrettyFormat()); try { final File f = new File(getOutputDirectory(), getName() + ".xml");//new File(wflname); final File dir = f.getParentFile(); dir.mkdirs(); f.createNewFile(); outp.output(doc, new BufferedOutputStream(new FileOutputStream(f))); if (this.saveHTML) { saveHTML(f); } if (this.saveTEXT) { saveTEXT(f); } } catch (final IOException e) { log.error(e.getLocalizedMessage()); } } catch (final FileNotFoundException e) { log.error(e.getLocalizedMessage()); } catch (final IOException e) { log.error(e.getLocalizedMessage()); } }
From source file:org.zenoss.zep.dao.impl.EventDaoHelper.java
/** * Merges the old and new event detail lists. Uses the EventDetailMergeBehavior setting * to determine how details with the same name in both lists should be handled. * * @param oldDetails Old event details.//from ww w . j a v a 2 s. co m * @param newDetails New event details. * @return A JSON string of the details aftering merging. * @throws org.zenoss.zep.ZepException X */ public String mergeDetailsToJson(List<EventDetail> oldDetails, List<EventDetail> newDetails) throws ZepException { Map<String, EventDetail> detailsMap = mergeDetails(oldDetails, newDetails); try { String results = JsonFormat.writeAllDelimitedAsString(detailsMap.values()); long eventMaxSizeBytes = zepConfigService.getConfig().getEventMaxSizeBytes(); if (!isValidDetailsSize(results, eventMaxSizeBytes)) { final String newDetailsJson = JsonFormat.writeAllDelimitedAsString(newDetails); if (isValidDetailsSize(newDetailsJson, eventMaxSizeBytes)) { // new details are a valid size, truncate the old and use the new results = newDetailsJson; logger.warn("Truncating old details because details are not a valid size: " + oldDetails); } else { // If the entire set of new details is not small enough, // truncate all non-zenoss details. final String originalResults = results; final List<EventDetail> newZenossDetails = removeNonZenossDetails(newDetails); results = JsonFormat.writeAllDelimitedAsString(newZenossDetails); logger.warn("Truncating old details because details are not a valid size. " + "New non-Zenoss details have also been truncated due to size. " + "ORIGINAL DATA: " + originalResults); } } return results; } catch (IOException e) { throw new ZepException(e.getLocalizedMessage(), e); } }
From source file:es.juntadeandalucia.panelGestion.presentacion.controlador.impl.GeosearchController.java
public void createNewCore() { String errorMessage = null;/*w w w . j av a 2s. co m*/ try { if (StringUtils.isEmpty(newCoreName)) { errorMessage = "Debe especificar un nombre para el nuevo core"; } else if (!Utils.isValidName(newCoreName)) { errorMessage = "El nombre para el nuevo core es invlido: " + newCoreName; } else if (Geosearch.existsCore(newCoreName)) { errorMessage = "El core '".concat(newCoreName).concat("' ya existe en Geobsquedas"); } else { boolean created = Geosearch.createCore(newCoreName); if (created) { // reloads the core list cores = Geosearch.getCores(); // selects the new core core = newCoreName; // reset newCore flag newCore = false; // triggers the onSelectCore event onSelectCore(); // informs to the user String successMessage = "El core '".concat(core).concat("' ha sido creado satisfactoriamente"); StatusMessages.instance().add(Severity.INFO, successMessage); log.info(successMessage); } else { errorMessage = "El core no ha podido crearse. Compruebe la configuracin de Geobsquedas"; } } } catch (IOException e) { errorMessage = "Error en la creacin de la carpeta del core: " + e.getLocalizedMessage(); } catch (GeosearchException e) { errorMessage = "Error en la creacin del core: " + e.getLocalizedMessage(); } if (errorMessage != null) { StatusMessages.instance().add(Severity.ERROR, errorMessage); log.error(errorMessage); } }