List of usage examples for java.lang.Exception.getCause()
public synchronized Throwable getCause()
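Before the project examples below, here is a minimal, self-contained sketch of the basic pattern: a low-level exception is passed as the cause of a higher-level one, and getCause() (inherited from Throwable) recovers it later. All class and method names in this sketch are hypothetical and exist only for illustration.

import java.io.IOException;

public class GetCauseSketch {

    // Hypothetical helper that wraps a low-level failure in a higher-level exception.
    static void loadConfig() {
        try {
            throw new IOException("config file missing");
        } catch (IOException e) {
            // The IOException becomes the cause of the IllegalStateException.
            throw new IllegalStateException("could not load configuration", e);
        }
    }

    public static void main(String[] args) {
        try {
            loadConfig();
        } catch (IllegalStateException e) {
            // getCause() returns the Throwable supplied at construction time, or null if none was set.
            Throwable cause = e.getCause();
            System.out.println("wrapper: " + e.getMessage());
            System.out.println("cause  : " + (cause == null ? "none" : cause.getMessage()));
        }
    }
}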
From source file:com.hiperium.bo.control.impl.DeviceBOImpl.java
/**
 * Interceptor that validates the user session before device operation methods are invoked.
 *
 * @param context the intercepted invocation
 * @return the result of the intercepted method
 * @throws Exception
 * @throws InformationException
 */
@AroundInvoke
private Object validateMethod(InvocationContext context) throws Exception, InformationException {
    this.log.debug("validateMethod() - BEGIN: " + context.getMethod().getName());
    String methodName = context.getMethod().getName();
    Object result = null;
    // INTERCEPTS ONLY DEVICE OPERATION METHODS
    if ("userOperation".equals(methodName) || "homeOperation".equals(methodName)) {
        Object[] params = context.getParameters();
        String sessionId = (String) params[1];
        if (StringUtils.isBlank(sessionId) || !this.sessionManager.isUserLoggedIn(sessionId)) {
            throw InformationException.generate(EnumI18N.COMMON,
                    EnumInformationException.ACCESS_NOT_ALLOWED, Locale.getDefault());
        }
        // PROCEED WITH METHOD CALL
        try {
            DeviceDTO deviceDTO = (DeviceDTO) params[0];
            super.getDaoFactory().getDeviceDAO().updateDeviceState(deviceDTO);
            result = context.proceed();
            this.userDeviceAuditBO.create(deviceDTO, sessionId);
        } catch (Exception e) {
            InformationException infoException = null;
            // If the underlying cause is already an InformationException, rethrow it as-is.
            if (e.getCause() instanceof InformationException) {
                infoException = (InformationException) e.getCause();
                throw infoException;
            }
            infoException = this.exceptionManager.createMessageException(e,
                    this.sessionManager.findUserLocale(sessionId));
            throw infoException;
        }
    } else {
        result = context.proceed();
    }
    this.log.debug("validateMethod() - END: " + context.getMethod().getName());
    return result;
}
From source file:gov.nih.nci.caarray.application.project.FileUploadUtils.java
private void addUploadedFile(Project project, File file, String fileName) throws InvalidFileException {
    try {
        final ProjectManagementService pms = getProjectManagementService();
        if (!checkAlreadyAdded(project.getFileNames(), fileName)) {
            pms.addFile(project, file, fileName);
            result.addSuccessfulFile(fileName);
        }
    } catch (Exception e) {
        if (e.getCause() instanceof InvalidStateException) {
            result.addConflictingFile(fileName);
        } else {
            throw new InvalidFileException(fileName, ADDING_FILE_ERROR_KEY, result,
                    ADDING_FILE_ERROR_MESSAGE, e);
        }
    }
}
From source file:com.mirth.connect.client.ui.MessageExportDialog.java
private void export() {
    String errorMessage = messageExportPanel.validate(true);

    if (StringUtils.isNotEmpty(errorMessage)) {
        parent.alertError(this, errorMessage);
        return;
    }

    int exportCount = 0;
    MessageWriterOptions writerOptions = messageExportPanel.getMessageWriterOptions();

    if (StringUtils.isBlank(writerOptions.getRootFolder())) {
        parent.alertError(parent, "Please enter a valid root path to store exported files.");
        setVisible(true);
        return;
    }

    setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));

    try {
        if (messageExportPanel.isExportLocal()) {
            PaginatedMessageList messageList = new PaginatedMessageList();
            messageList.setChannelId(channelId);
            messageList.setClient(parent.mirthClient);
            messageList.setMessageFilter(messageFilter);
            messageList.setPageSize(pageSize);
            messageList.setIncludeContent(true);

            writerOptions.setBaseFolder(SystemUtils.getUserHome().getAbsolutePath());

            MessageWriter messageWriter = MessageWriterFactory.getInstance().getMessageWriter(writerOptions,
                    encryptor);

            AttachmentSource attachmentSource = null;
            if (writerOptions.includeAttachments()) {
                attachmentSource = new AttachmentSource() {
                    @Override
                    public List<Attachment> getMessageAttachments(Message message) throws ClientException {
                        return PlatformUI.MIRTH_FRAME.mirthClient
                                .getAttachmentsByMessageId(message.getChannelId(), message.getMessageId());
                    }
                };
            }

            try {
                exportCount = new MessageExporter().exportMessages(messageList, messageWriter,
                        attachmentSource);
                messageWriter.finishWrite();
            } finally {
                messageWriter.close();
            }
        } else {
            writerOptions.setIncludeAttachments(messageExportPanel.isIncludeAttachments());
            exportCount = parent.mirthClient.exportMessagesServer(channelId, messageFilter, pageSize,
                    writerOptions);
        }

        setVisible(false);
        setCursor(Cursor.getDefaultCursor());
        parent.alertInformation(parent, exportCount + " message" + ((exportCount == 1) ? " has" : "s have")
                + " been successfully exported to: " + writerOptions.getRootFolder());
    } catch (Exception e) {
        setCursor(Cursor.getDefaultCursor());
        // Report the underlying cause when one is present, otherwise the exception itself.
        Throwable cause = (e.getCause() == null) ? e : e.getCause();
        parent.alertThrowable(parent, cause);
    }
}
From source file:com.mirth.connect.connectors.http.HttpMessageReceiver.java
@Override
public void doStop() throws UMOException {
    super.doStop();
    try {
        logger.debug("stopping HTTP server");
        server.stop();
    } catch (Exception e) {
        throw new MuleException(new Message(Messages.FAILED_TO_STOP_X, "HTTP Listener"), e.getCause());
    }
}
From source file:info.sargis.eventbus.config.EventBusHandlerBeanDefinitionParser.java
protected void parseTypeFilters(Element element, ClassPathBeanDefinitionScanner scanner,
        XmlReaderContext readerContext, ParserContext parserContext) {
    // Parse exclude and include filter elements.
    ClassLoader classLoader = scanner.getResourceLoader().getClassLoader();
    NodeList nodeList = element.getChildNodes();
    for (int i = 0; i < nodeList.getLength(); i++) {
        Node node = nodeList.item(i);
        if (node.getNodeType() == Node.ELEMENT_NODE) {
            String localName = parserContext.getDelegate().getLocalName(node);
            try {
                if (INCLUDE_FILTER_ELEMENT.equals(localName)) {
                    TypeFilter typeFilter = createTypeFilter((Element) node, classLoader);
                    scanner.addIncludeFilter(typeFilter);
                } else if (EXCLUDE_FILTER_ELEMENT.equals(localName)) {
                    TypeFilter typeFilter = createTypeFilter((Element) node, classLoader);
                    scanner.addExcludeFilter(typeFilter);
                }
            } catch (Exception ex) {
                readerContext.error(ex.getMessage(), readerContext.extractSource(element), ex.getCause());
            }
        }
    }
}
From source file:org.geowebcache.rest.RESTDispatcher.java
protected ModelAndView handleRequestInternal(HttpServletRequest req, HttpServletResponse resp)
        throws Exception {
    try {
        myConverter.service(req, resp);
    } catch (Exception e) {
        RestletException re = null;
        if (e instanceof RestletException) {
            re = (RestletException) e;
        }
        if (re == null && e.getCause() instanceof RestletException) {
            re = (RestletException) e.getCause();
        }
        if (re != null) {
            resp.setStatus(re.getStatus().getCode());
            // This does not actually write anything?
            // re.getRepresentation().write(resp.getOutputStream());
            String reStr = re.getRepresentation().getText();
            resp.getOutputStream().write(reStr.getBytes());
            resp.getOutputStream().flush();
        } else {
            resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            new StringRepresentation(e.getMessage()).write(resp.getOutputStream());
            resp.getOutputStream().flush();
        }
    }
    return null;
}
From source file:ste.web.http.velocity.BugFreeVelocityHandler.java
@Test
public void viewError() {
    try {
        context.setAttribute(ATTR_VIEW, TEST_ERROR_VIEW1);
        handler.handle(request, response, context);
        fail(TEST_ERROR_VIEW1 + " error shall throw a HttpException");
    } catch (Exception x) {
        //
        // OK
        //
        then(x.getCause()).isInstanceOf(ParseErrorException.class);
    }
}
From source file:com.google.enterprise.connector.sharepoint.dao.SimpleSharePointDAO.java
/**
 * Uses Spring's SimpleJdbcTemplate batch update feature to execute multiple
 * update queries in one go, and takes care of any failure that might occur
 * during the execution. A driver implementation may or may not proceed to the
 * next query in a batch when one of them fails. Here is what happens when a
 * failure occurs:
 * <p>
 * If the driver has at least attempted execution of all the queries in the
 * batch, nothing more is done; the exception is just logged as a warning.
 * </p>
 * <p>
 * If the driver stopped processing the queries because of a failure at a
 * particular index, all the queries starting from that index are executed
 * individually.
 * </p>
 * <p>
 * If the driver failed to execute the whole request for any reason (for
 * example, it does not support batchUpdate), the queries are still sent for
 * individual execution.
 * </p>
 * One important limitation of batch updates is that the exact cause of an
 * individual query failure is not known. Whatever the reason, it is assumed
 * to be unrecoverable, so the best the connector can do is log such events
 * and proceed. Such scenarios can, of course, leave the user data store in a
 * bad state.
 *
 * @param params an array of {@link SqlParameterSource}, each representing the
 *          parameters of one SQL query; the length of the array is the number
 *          of SQL queries executed in the batch
 * @param query the query to be executed, specified as a {@link Query}
 * @return the status of each query execution (number of rows updated), in the
 *         same order in which the queries were specified
 * @throws SharepointException
 */
public int[] batchUpdate(Query query, SqlParameterSource[] params) throws SharepointException {
    if (null == params || 0 == params.length) {
        return null;
    }
    int[] batchStatus = null;
    try {
        batchStatus = getSimpleJdbcTemplate().batchUpdate(getSqlQuery(query), params);
        LOGGER.info("BatchUpdate completed successfully for #" + batchStatus.length + " records. Query [ "
                + query + " ] ");
    } catch (Exception e) {
        if (null == e.getCause() || (!(e.getCause() instanceof BatchUpdateException)
                && !(e.getCause() instanceof SQLException))) {
            LOGGER.log(Level.WARNING, "BatchUpdate failed for query [ " + query + " ]", e);
        } else {
            if (e.getCause() instanceof BatchUpdateException) {
                batchStatus = handleBatchUpdateExceptionForMSSQLAndMySQL((BatchUpdateException) e.getCause(),
                        query, params);
                LOGGER.info("BatchUpdate completed with a fallback for #" + batchStatus.length
                        + " records. Query [ " + query + " ] ");
            } else {
                batchStatus = handleBatchUpdateExceptionForOracle((SQLException) e.getCause(), query, params);
                LOGGER.info("BatchUpdate completed with a fallback for #" + batchStatus.length
                        + " records. Query [ " + query + " ] ");
            }
        }
    } catch (Throwable t) {
        // This would be an error. No point in retrying, so no fall-back.
        throw new SharepointException("Batch execution failed abruptly!! ", t);
    }
    return batchStatus;
}
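The two handleBatchUpdateException... helpers called above are not shown on this page. Purely as an illustration (not the connector's actual implementation), a per-row fallback after a partial batch failure might look like the sketch below; the method name fallbackToIndividualUpdates is made up, and it assumes the SqlParameterSource overload of SimpleJdbcTemplate.update() alongside the getSimpleJdbcTemplate() accessor used in the example.

// Hypothetical sketch only: re-run the parameter sets the driver did not complete,
// one by one, after a BatchUpdateException reported a partial batch failure.
private int[] fallbackToIndividualUpdates(BatchUpdateException bue, String sql, SqlParameterSource[] params) {
    int[] status = new int[params.length];
    int[] partial = bue.getUpdateCounts(); // counts for the statements executed before the failure
    int resumeFrom = (partial == null) ? 0 : Math.min(partial.length, params.length);
    // Keep whatever the driver already reported.
    for (int i = 0; i < resumeFrom; i++) {
        status[i] = partial[i];
    }
    // Execute the remaining queries individually, logging and skipping any that still fail.
    for (int i = resumeFrom; i < params.length; i++) {
        try {
            status[i] = getSimpleJdbcTemplate().update(sql, params[i]);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "Individual execution failed for record #" + i, e);
            status[i] = 0;
        }
    }
    return status;
}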
From source file:cn.ctyun.amazonaws.services.s3.transfer.internal.UploadMonitor.java
private List<PartETag> collectPartETags() {
    final List<PartETag> partETags = new ArrayList<PartETag>(futures.size());
    for (Future<PartETag> future : futures) {
        try {
            partETags.add(future.get());
        } catch (Exception e) {
            // Future.get() wraps a failed part upload in an ExecutionException;
            // getCause() recovers the original failure for the rethrown exception.
            throw new AmazonClientException("Unable to upload part: " + e.getCause().getMessage(),
                    e.getCause());
        }
    }
    return partETags;
}
From source file:com.glaf.core.job.BaseJob.java
public void execute(JobExecutionContext context) throws JobExecutionException {
    String taskId = context.getJobDetail().getJobDataMap().getString("taskId");
    ISysSchedulerService sysSchedulerService = ContextFactory.getBean("sysSchedulerService");
    ISchedulerLogService schedulerLogService = ContextFactory.getBean("schedulerLogService");
    Scheduler scheduler = sysSchedulerService.getSchedulerByTaskId(taskId);
    if (scheduler != null) {
        if (scheduler.getRunType() == 0) { // run-once task
            if (scheduler.getRunStatus() != 0) { // already started or finished
                logger.info(scheduler.getTitle() + " has already run, skipping this execution.");
                return;
            }
        } else if (scheduler.getRunType() == 1) { // recurring task
            if (scheduler.getRunStatus() == 1) { // still running
                logger.info(scheduler.getTitle() + " is still running, skipping this execution.");
                return;
            }
        }

        SchedulerLog log = new SchedulerLog();
        log.setId(UUID32.getUUID());
        log.setTaskId(scheduler.getTaskId());
        log.setTaskName(scheduler.getTaskName());
        log.setTitle(scheduler.getTitle());
        log.setContent(scheduler.getContent());
        if (Authentication.getAuthenticatedActorId() != null) {
            log.setCreateBy(Authentication.getAuthenticatedActorId());
        } else {
            log.setCreateBy("system");
        }
        log.setStartDate(new Date());

        long start = System.currentTimeMillis();
        boolean success = false;
        Date now = new Date();
        try {
            scheduler.setPreviousFireTime(context.getPreviousFireTime());
            scheduler.setNextFireTime(context.getNextFireTime());
            scheduler.setRunStatus(1); // mark as running
            sysSchedulerService.update(scheduler);
            log.setStatus(1);
            schedulerLogService.save(log);
            logger.info(scheduler.getTitle() + " next fire time: "
                    + DateUtils.getDateTime(context.getNextFireTime()));
            this.runJob(context);
            success = true;
        } catch (Exception ex) {
            success = false;
            ex.printStackTrace();
            logger.error(ex);
            // Prefer the message of the underlying cause when one is available.
            if (ex.getCause() != null) {
                log.setExitMessage(ex.getCause().getMessage());
            } else {
                log.setExitMessage(ex.getMessage());
            }
            // throw new RuntimeException(ex);
        } finally {
            long jobRunTime = System.currentTimeMillis() - start;
            log.setJobRunTime(jobRunTime);
            log.setEndDate(new Date());
            scheduler.setJobRunTime(jobRunTime);
            scheduler.setPreviousFireTime(now);
            if (success) {
                scheduler.setRunStatus(2); // completed successfully
                log.setStatus(2);
            } else {
                scheduler.setRunStatus(3); // failed
                log.setStatus(3);
            }
            if (scheduler.getRunType() == 0 && success) {
                scheduler.setNextFireTime(null); // run-once task: no further executions
            }
            sysSchedulerService.update(scheduler);
            schedulerLogService.save(log);
        }
    }
}