List of usage examples for java.lang.Throwable.toString()
public String toString()
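Before the source-file examples, here is a minimal standalone sketch (not taken from any of the files below; the exception type and messages are purely illustrative) of what toString() produces: the throwable's class name, plus ": " and the detail message when one is present.

public class ThrowableToStringDemo {
    public static void main(String[] args) {
        try {
            throw new IllegalStateException("connection closed"); // illustrative exception
        } catch (Throwable t) {
            // Prints: java.lang.IllegalStateException: connection closed
            System.out.println(t.toString());
        }
        // With no detail message, toString() is just the class name:
        // java.lang.RuntimeException
        System.out.println(new RuntimeException().toString());
    }
}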
From source file:com.adflake.AdFlakeManager.java
public void prepareVideoAdaptersForLayout(AdFlakeLayout adFlakeLayout) {
    Log.d(AdFlakeUtil.ADFLAKE, "prepareVideoAdaptersForLayout");
    for (Ration ration : _videoRationsList) {
        try {
            switch (ration.type) {
            case AdFlakeUtil.NETWORK_TYPE_ADCOLONY:
                AdColonyVideoAdsAdapter.prepareForRation(ration, adFlakeLayout);
                break;
            default:
                break;
            }
        } catch (Throwable ex) {
            Log.e(AdFlakeUtil.ADFLAKE, "prepareVideoAdaptersForLayout failed to prepare for ration="
                    + ration.name + "\n error=" + ex.toString());
        }
    }
}
From source file:com.ctrip.infosec.rule.executor.RulesExecutorService.java
/**
 *
 */
void executeSerial(RiskFact fact) {
    // matchRules
    List<Rule> matchedRules = Configs.matchRules(fact, true);
    TraceLogger.traceLog("? " + matchedRules.size() + " ? ...");
    StatelessRuleEngine statelessRuleEngine = SpringContextHolder.getBean(StatelessRuleEngine.class);
    StopWatch clock = new StopWatch();
    for (Rule rule : matchedRules) {
        String packageName = rule.getRuleNo();
        RuleMonitorHelper.newTrans(fact, RuleMonitorType.RULE, packageName);
        TraceLogger.beginNestedTrans(fact.eventId);
        TraceLogger.setNestedLogPrefix("[" + packageName + "]");
        Contexts.setPolicyOrRuleNo(packageName);
        try {
            clock.reset();
            clock.start();

            // set default result
            if (!Constants.eventPointsWithScene.contains(fact.eventPoint)) {
                Map<String, Object> defaultResult = Maps.newHashMap();
                defaultResult.put(Constants.riskLevel, 0);
                defaultResult.put(Constants.riskMessage, "PASS");
                fact.results4Async.put(rule.getRuleNo(), defaultResult);
            }

            // add current execute ruleNo and logPrefix before execution
            fact.ext.put(Constants.key_ruleNo, rule.getRuleNo());
            fact.ext.put(Constants.key_isAsync, true);

            statelessRuleEngine.execute(packageName, fact);

            // remove current execute ruleNo when finished execution.
            fact.ext.remove(Constants.key_ruleNo);
            fact.ext.remove(Constants.key_isAsync);

            clock.stop();
            long handlingTime = clock.getTime();

            if (!Constants.eventPointsWithScene.contains(fact.eventPoint)) {
                Map<String, Object> resultWithScene = fact.resultsGroupByScene4Async.get(packageName);
                if (resultWithScene != null) {
                    resultWithScene.put(Constants.async, false);
                    resultWithScene.put(Constants.timeUsage, handlingTime);
                    TraceLogger.traceLog(">>>> [" + packageName + "] : [???] riskLevel = "
                            + resultWithScene.get(Constants.riskLevel)
                            + ", riskMessage = " + resultWithScene.get(Constants.riskMessage)
                            + ", riskScene = " + resultWithScene.get(Constants.riskScene)
                            + ", usage = " + resultWithScene.get(Constants.timeUsage) + "ms");
                }
                Map<String, Object> result = fact.results4Async.get(packageName);
                if (result != null) {
                    result.put(Constants.async, true);
                    result.put(Constants.timeUsage, handlingTime);
                    TraceLogger.traceLog(">>>> [" + packageName + "] : riskLevel = "
                            + result.get(Constants.riskLevel)
                            + ", riskMessage = " + result.get(Constants.riskMessage)
                            + ", usage = " + result.get(Constants.timeUsage) + "ms");
                }
            } else {
                Map<String, Object> result = fact.results4Async.get(packageName);
                if (result != null) {
                    result.put(Constants.async, false);
                    result.put(Constants.timeUsage, handlingTime);
                    int riskLevel = MapUtils.getIntValue(result, Constants.riskLevel, 0);
                    if (riskLevel > 0) {
                        TraceLogger.traceLog(">>>> [" + packageName + "] : [?] riskLevel = "
                                + result.get(Constants.riskLevel)
                                + ", riskMessage = " + result.get(Constants.riskMessage)
                                + ", usage = " + result.get(Constants.timeUsage) + "ms");
                    }
                }
                Map<String, Object> resultWithScene = fact.resultsGroupByScene4Async.get(packageName);
                if (resultWithScene != null) {
                    resultWithScene.put(Constants.async, true);
                    resultWithScene.put(Constants.timeUsage, handlingTime);
                    TraceLogger.traceLog(">>>> [" + packageName + "] [?]: riskLevel = "
                            + resultWithScene.get(Constants.riskLevel)
                            + ", riskMessage = " + resultWithScene.get(Constants.riskMessage)
                            + ", riskScene = " + resultWithScene.get(Constants.riskScene)
                            + ", usage = " + resultWithScene.get(Constants.timeUsage) + "ms");
                } else {
                    TraceLogger.traceLog(">>>> [" + packageName + "] [?]: ?");
                }
            }
        } catch (Throwable ex) {
            logger.warn(Contexts.getLogPrefix() + ". packageName: " + packageName, ex);
            TraceLogger.traceLog("[" + rule.getRuleNo() + "] EXCEPTION: " + ex.toString());
        } finally {
            TraceLogger.commitNestedTrans();
            RuleMonitorHelper.commitTrans(fact);
            Contexts.clearLogPrefix();
        }
    }
}
From source file:com.mirth.connect.client.ui.LibraryResourcesPanel.java
public void initialize() {
    final String workingId = PlatformUI.MIRTH_FRAME.startWorking("Loading library resources...");

    SwingWorker<List<LibraryProperties>, Void> worker = new SwingWorker<List<LibraryProperties>, Void>() {
        @Override
        public List<LibraryProperties> doInBackground() throws ClientException {
            List<ResourceProperties> resourceProperties = PlatformUI.MIRTH_FRAME.mirthClient.getResources();
            List<LibraryProperties> libraryProperties = new ArrayList<LibraryProperties>();
            for (ResourceProperties resource : resourceProperties) {
                if (resource instanceof LibraryProperties) {
                    libraryProperties.add((LibraryProperties) resource);
                }
            }
            return libraryProperties;
        }

        @Override
        public void done() {
            try {
                List<LibraryProperties> resources = get();
                if (resources == null) {
                    resources = new ArrayList<LibraryProperties>();
                }

                Object[][] data = new Object[resources.size()][3];
                int i = 0;
                for (LibraryProperties properties : resources) {
                    data[i][SELECTED_COLUMN] = null;
                    data[i][PROPERTIES_COLUMN] = properties;
                    data[i][TYPE_COLUMN] = properties.getType();
                    i++;

                    for (Map<String, String> resourceIds : selectedResourceIds.values()) {
                        if (resourceIds.containsKey(properties.getId())) {
                            resourceIds.put(properties.getId(), properties.getName());
                        }
                    }
                }

                ((RefreshTableModel) resourceTable.getModel()).refreshDataVector(data);
                treeTable.getSelectionModel().setSelectionInterval(0, 0);
                treeTable.getTreeSelectionModel().setSelectionPath(treeTable.getPathForRow(0));
                parent.resourcesReady();
            } catch (Throwable t) {
                if (t instanceof ExecutionException) {
                    t = t.getCause();
                }
                PlatformUI.MIRTH_FRAME.alertThrowable(PlatformUI.MIRTH_FRAME, t,
                        "Error loading library resources: " + t.toString());
            } finally {
                PlatformUI.MIRTH_FRAME.stopWorking(workingId);
            }
        }
    };

    worker.execute();
}
From source file:flex.messaging.services.http.HTTPProxyAdapter.java
/** {@inheritDoc} */
public Object invoke(Message msg) {
    HTTPMessage message = (HTTPMessage) msg;
    ProxyContext context = new ProxyContext();

    // SOAPMessages should be sent through the SOAPProxyAdapter, but
    // the default destination may be just to the HTTPProxyAdapter.
    // We'll update the context just in case....
    if (message instanceof SOAPMessage)
        context.setSoapRequest(true);
    else
        context.setSoapRequest(false);

    setupContext(context, message);

    try {
        filterChain.invoke(context);

        // TODO: Do we want a return type that encapsulates the response data?

        // OUTPUT
        AcknowledgeMessage ack = new AcknowledgeMessage();
        ack.setBody(context.getResponse());
        ack.setHeader(Message.STATUS_CODE_HEADER, context.getStatusCode());

        if (context.getRecordHeaders()) {
            ack.setHeader(REQUEST_HEADERS, context.getRequestHeaders());
            ack.setHeader(RESPONSE_HEADERS, context.getResponseHeaders());
        }

        return ack;
    } catch (MessageException ex) {
        throw ex;
    } catch (Throwable t) {
        // this should never happen - ErrorFilter should catch everything
        t.printStackTrace();
        throw new MessageException(t.toString());
    }
}
From source file:interactivespaces.activity.impl.BaseActivity.java
/**
 * Log the activity configuration information to an activity config log file.
 *
 * @param logFile
 *          file to write the log into
 */
private void logConfiguration(String logFile) {
    try {
        StringBuilder logBuilder = new StringBuilder();
        getLog().info("Logging activity configuration to " + logFile);

        Configuration configuration = getConfiguration();
        Map<String, String> configMap = configuration.getCollapsedMap();
        TreeMap<String, String> sortedMap = new TreeMap<String, String>(configMap);
        for (Map.Entry<String, String> entry : sortedMap.entrySet()) {
            String value;
            try {
                value = configuration.evaluate(entry.getValue());
            } catch (Throwable e) {
                value = e.toString();
            }
            logBuilder.append(String.format("%s=%s\n", entry.getKey(), value));
        }

        File configLog = new File(getActivityFilesystem().getLogDirectory(), logFile);
        fileSupport.writeFile(configLog, logBuilder.toString());
    } catch (Throwable e) {
        logException("While logging activity configuration", e);
    }
}
From source file:net.sourceforge.fenixedu.presentationTier.util.ExceptionInformation.java
private StringBuilder headerAppend(Throwable ex) {
    StringBuilder exceptionInfo = new StringBuilder(
            "- - - - - - - - - - - Error Origin - - - - - - - - - - -\n");
    exceptionInfo.append("\n[Exception] ").append(ex.toString()).append("\n\n");
    setException(ex);
    setThreadName(Thread.currentThread().getName());
    setFlatExceptionStack(ThrowableInfo.getFlatThrowableInfoList(ex));
    return exceptionInfo;
}
From source file:com.mirth.connect.server.controllers.DefaultExtensionController.java
@Override
public InstallationResult extractExtension(InputStream inputStream) {
    Throwable cause = null;
    Set<MetaData> metaDataSet = new HashSet<MetaData>();

    File installTempDir = new File(ExtensionController.getExtensionsPath(), "install_temp");
    if (!installTempDir.exists()) {
        installTempDir.mkdir();
    }

    File tempFile = null;
    FileOutputStream tempFileOutputStream = null;
    ZipFile zipFile = null;

    try {
        /*
         * create a new temp file (in the install temp dir) to store the zip file contents
         */
        tempFile = File.createTempFile(ServerUUIDGenerator.getUUID(), ".zip", installTempDir);

        // write the contents of the multipart fileitem to the temp file
        try {
            tempFileOutputStream = new FileOutputStream(tempFile);
            IOUtils.copy(inputStream, tempFileOutputStream);
        } finally {
            IOUtils.closeQuietly(tempFileOutputStream);
        }

        // create a new zip file from the temp file
        zipFile = new ZipFile(tempFile);

        // get a list of all of the entries in the zip file
        Enumeration<? extends ZipEntry> entries = zipFile.entries();

        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            String entryName = entry.getName();

            if (entryName.endsWith("plugin.xml") || entryName.endsWith("destination.xml")
                    || entryName.endsWith("source.xml")) {
                // parse the extension metadata xml file
                MetaData extensionMetaData = serializer
                        .deserialize(IOUtils.toString(zipFile.getInputStream(entry)), MetaData.class);
                metaDataSet.add(extensionMetaData);

                if (!extensionLoader.isExtensionCompatible(extensionMetaData)) {
                    if (cause == null) {
                        cause = new VersionMismatchException("Extension \"" + entry.getName()
                                + "\" is not compatible with this version of Mirth Connect.");
                    }
                }
            }
        }

        if (cause == null) {
            // reset the entries and extract
            entries = zipFile.entries();

            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();

                if (entry.isDirectory()) {
                    /*
                     * assume directories are stored parents first then children.
                     *
                     * TODO: this is not robust, just for demonstration purposes.
                     */
                    File directory = new File(installTempDir, entry.getName());
                    directory.mkdir();
                } else {
                    // otherwise, write the file out to the install temp dir
                    InputStream zipInputStream = zipFile.getInputStream(entry);
                    OutputStream outputStream = new BufferedOutputStream(
                            new FileOutputStream(new File(installTempDir, entry.getName())));
                    IOUtils.copy(zipInputStream, outputStream);
                    IOUtils.closeQuietly(zipInputStream);
                    IOUtils.closeQuietly(outputStream);
                }
            }
        }
    } catch (Throwable t) {
        cause = new ControllerException("Error extracting extension. " + t.toString(), t);
    } finally {
        if (zipFile != null) {
            try {
                zipFile.close();
            } catch (Exception e) {
                cause = new ControllerException(e);
            }
        }

        // delete the temp file since it is no longer needed
        FileUtils.deleteQuietly(tempFile);
    }

    return new InstallationResult(cause, metaDataSet);
}
From source file:cn.webwheel.DefaultMain.java
/**
 * Wrap exception to a json object and return it to client.
 * <p>
 * <b>json object format:</b><br/>
 * <p><blockquote><pre>
 * {
 *     "msg": "the exception's message",
 *     "stackTrace":[
 *         "exception's stack trace1",
 *         "exception's stack trace2",
 *         "exception's stack trace3",
 *         ....
 *     ]
 * }
 * </pre></blockquote></p>
 */
public Object executeActionError(WebContext ctx, ActionInfo ai, Object action, Throwable e) throws Throwable {
    if (e instanceof LogicException) {
        return ((LogicException) e).getResult();
    }
    Logger.getLogger(DefaultMain.class.getName()).log(Level.SEVERE, "action execution error", e);
    StringBuilder sb = new StringBuilder();
    sb.append("{\n");
    String s;
    try {
        s = JsonResult.objectMapper.writeValueAsString(e.toString());
    } catch (IOException e1) {
        s = "\"" + e.toString().replace("\"", "'") + "\"";
    }
    sb.append(" \"msg\" : " + s + ",\n");
    sb.append(" \"stackTrace\" : [");
    StringWriter sw = new StringWriter();
    e.printStackTrace(new PrintWriter(sw));
    String[] ss = sw.toString().split("\r\n");
    for (int i = 1; i < ss.length; i++) {
        if (sb.charAt(sb.length() - 1) != '[') {
            sb.append(',');
        }
        sb.append("\n ").append(JsonResult.objectMapper.writeValueAsString(ss[i]));
    }
    sb.append("\n ]\n");
    sb.append("}");
    HttpServletResponse response = ctx.getResponse();
    if (JsonResult.defWrapMultipart && ServletFileUpload.isMultipartContent(ctx.getRequest())
            && !"XMLHttpRequest".equals(ctx.getRequest().getHeader("X-Requested-With"))) {
        response.setContentType("text/html");
        sb.insert(0, "<textarea>\n");
        sb.append("\n</textarea>");
    } else {
        response.setContentType("application/json");
    }
    response.setCharacterEncoding("utf-8");
    response.setHeader("Cache-Control", "no-cache");
    response.setHeader("Pragma", "no-cache");
    response.setHeader("Expires", "-1");
    response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    response.getWriter().write(sb.toString());
    return EmptyResult.inst;
}
From source file:com.intuit.tank.runner.method.RequestRunner.java
@Override
public String execute() {
    String validation = TankConstants.HTTP_CASE_PASS;
    if (APITestHarness.getInstance().getAgentRunData().getActiveProfile() == LoggingProfile.TRACE) {
        LOG.info(LogUtil.getLogMessage("Executing step..."));
    }
    if (checkPreValidations()) {
        if (LOG.isDebugEnabled()
                || APITestHarness.getInstance().getAgentRunData().getActiveProfile() == LoggingProfile.VERBOSE
                || APITestHarness.getInstance().getAgentRunData().getActiveProfile() == LoggingProfile.TRACE) {
            LOG.info(LogUtil.getLogMessage("Skipping request because of Pre-Validation."));
        }
        return TankConstants.HTTP_CASE_PASS;
    }
    try {
        if (method.equalsIgnoreCase("GET")) {
            baseRequest.doGet(baseResponse);
        } else if (method.equalsIgnoreCase("POST")) {
            baseRequest.doPost(baseResponse);
        } else if (method.equalsIgnoreCase("PUT")) {
            baseRequest.doPut(baseResponse);
        } else if (method.equalsIgnoreCase("DELETE")) {
            baseRequest.doDelete(baseResponse);
        }
    } catch (Throwable e) {
        LOG.error(LogUtil.getLogMessage("Unexpected Exception executing request: " + e.toString(),
                LogEventType.IO), e);
    }
    if (APITestHarness.getInstance().getTPMonitor().isEnabled()) {
        if (!StringUtils.isEmpty(loggingKey)) {
            APITestHarness.getInstance().getTPMonitor().addToMap(loggingKey, baseRequest);
        }
    }
    validation = processValidations(variables, "", baseResponse);
    LogUtil.getLogEvent().setValidationStatus(validation);
    if (!validation.equalsIgnoreCase(TankConstants.HTTP_CASE_PASS)) {
        LOG.error(LogUtil.getLogMessage("Validation Failed", LogEventType.Validation, LoggingProfile.VERBOSE));
    }
    if (APITestHarness.getInstance().getAgentRunData().getActiveProfile() == LoggingProfile.VERBOSE) {
        LOG.info(LogUtil.getLogMessage("Made Call ..."));
    }
    processVariables(variables, baseResponse);
    if (APITestHarness.getInstance().getAgentRunData().getActiveProfile() == LoggingProfile.TRACE) {
        LOG.info(LogUtil.getLogMessage("Variables Processed. Exiting ..."));
    }
    if (!APITestHarness.getInstance().isDebug()) {
        processPerfResponse(validation, uniqueName, Integer.valueOf(variables.getVariable("THREAD_ID")),
                baseRequest, baseResponse);
    }
    tsc.setRequest(baseRequest);
    tsc.setResponse(baseResponse);
    return validation;
}
From source file:gobblin.data.management.copy.hive.HiveDatasetFinder.java
@Override
public Iterator<HiveDataset> getDatasetsIterator() throws IOException {
    return new AbstractIterator<HiveDataset>() {
        private Iterator<DbAndTable> tables = getTables().iterator();

        @Override
        protected HiveDataset computeNext() {
            while (this.tables.hasNext()) {
                DbAndTable dbAndTable = this.tables.next();

                try (AutoReturnableObject<IMetaStoreClient> client = HiveDatasetFinder.this.clientPool
                        .getClient()) {
                    Table table = client.get().getTable(dbAndTable.getDb(), dbAndTable.getTable());
                    Config datasetConfig = getDatasetConfig(table);
                    if (ConfigUtils.getBoolean(datasetConfig, HIVE_DATASET_IS_BLACKLISTED_KEY,
                            DEFAULT_HIVE_DATASET_IS_BLACKLISTED_KEY)) {
                        continue;
                    }

                    if (HiveDatasetFinder.this.eventSubmitter.isPresent()) {
                        SlaEventSubmitter.builder().datasetUrn(dbAndTable.toString())
                                .eventSubmitter(HiveDatasetFinder.this.eventSubmitter.get())
                                .eventName(DATASET_FOUND).build().submit();
                    }

                    return createHiveDataset(table, datasetConfig);
                } catch (Throwable t) {
                    log.error(String.format("Failed to create HiveDataset for table %s.%s",
                            dbAndTable.getDb(), dbAndTable.getTable()), t);

                    if (HiveDatasetFinder.this.eventSubmitter.isPresent()) {
                        SlaEventSubmitter.builder().datasetUrn(dbAndTable.toString())
                                .eventSubmitter(HiveDatasetFinder.this.eventSubmitter.get())
                                .eventName(DATASET_ERROR).additionalMetadata(FAILURE_CONTEXT, t.toString())
                                .build().submit();
                    }
                }
            }
            return endOfData();
        }
    };
}