List of usage examples for java.lang.StackTraceElement.toString()
public String toString()
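StackTraceElement.toString() renders a single frame as declaringClass.methodName(fileName:lineNumber), and every example below builds on that string form. A minimal sketch of the output (the class, method, and file names here are made up for illustration):

    public class ToStringDemo {
        public static void main(String[] args) {
            StackTraceElement elm = new StackTraceElement(
                    "com.example.Foo", "bar", "Foo.java", 42);
            // Prints: com.example.Foo.bar(Foo.java:42)
            System.out.println(elm.toString());
        }
    }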
From source file:dk.netarkivet.common.utils.EMailNotifications.java
/**
 * Send mail notifications.
 *
 * @param message The message body itself
 * @param eventType Type of notification
 * @param e An exception (can be null)
 */
private void sendMailNotifications(String message, NotificationType eventType, Throwable e) {
    String subjectPrefix = SUBJECT_PREFIX + "-" + eventType + ": ";
    // Subject is a specified string + first line of error message
    String subject = subjectPrefix + message.split("\n")[0];
    // Body consists of four parts.
    StringBuffer body = new StringBuffer();
    // 1: The host and date of the message
    body.append("Host: " + SystemUtils.getLocalHostName() + "\n");
    body.append("Date: " + new Date().toString() + "\n");
    // 2: The origin of the message, found by inspecting the stack trace
    for (StackTraceElement elm : Thread.currentThread().getStackTrace()) {
        if (!elm.toString().startsWith(getClass().getName())
                && !elm.toString().startsWith(Notifications.class.getName())
                && !elm.toString().startsWith(Thread.class.getName())) {
            body.append(elm.toString() + "\n");
            break;
        }
    }
    // 3: The given message
    body.append(message + "\n");
    // 4: Optionally the exception
    if (e != null) {
        body.append(ExceptionUtils.getStackTrace(e));
    }
    try {
        // Send the mail
        EMailUtils.sendEmail(MAIL_RECEIVER, MAIL_SENDER, subject, body.toString());
        // Log as error
        log.error("Mailing " + subjectPrefix + message, e);
    } catch (Exception e1) {
        // On trouble: log it and print it to stderr; it's the best we can do!
        String msg = "Could not send email on " + eventType.toString().toLowerCase()
                + " notification:\n" + body.toString() + "\n";
        System.err.println(msg);
        e1.printStackTrace(System.err);
        log.error(msg, e1);
    }
}
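The caller-detection loop works because toString() begins with the fully qualified declaring class, so startsWith(...) on the rendered frame is a cheap way to skip frames belonging to Thread, the notification classes, and the class itself; the first surviving frame is the external caller. A minimal standalone sketch of the same idiom (class names here are illustrative, not taken from the source above):

    public class CallerFinder {
        /** Returns the first stack frame outside this utility and java.lang.Thread. */
        static String externalCaller() {
            for (StackTraceElement elm : Thread.currentThread().getStackTrace()) {
                String s = elm.toString();
                if (!s.startsWith(CallerFinder.class.getName())
                        && !s.startsWith(Thread.class.getName())) {
                    return s; // first frame not belonging to the utility itself
                }
            }
            return "unknown";
        }
    }

    class Caller {
        public static void main(String[] args) {
            // Prints something like: Caller.main(CallerFinder.java:17)
            System.out.println(CallerFinder.externalCaller());
        }
    }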
From source file:kzht.gm.p6spy.extension.logging.Log4jLoggerEx.java
/**
 * Determines whether the current query was issued by the application through
 * Hibernate, by checking the stack trace for a "SessionImpl" frame.
 *
 * @return <code>true</code> if "SessionImpl" appears in the current stack trace,
 *         <code>false</code> otherwise
 */
private boolean isApplicationQuery() {
    for (StackTraceElement ste : new Throwable().getStackTrace()) {
        if (ste.toString().indexOf("SessionImpl") >= 0) {
            return true;
        }
    }
    return false;
}
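Two idioms here are worth noting. First, new Throwable().getStackTrace() captures the current stack without anything being thrown; Thread.currentThread().getStackTrace() yields the same frames but, on many JVMs, with the getStackTrace call itself at the top, which is why the EMailNotifications example above filters frames starting with Thread.class.getName(). Second, the substring test on ste.toString() works because the rendered frame embeds the fully qualified class name of each caller.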
From source file:org.trianacode.TrianaCloud.Utils.TrianaCloudServlet.java
protected String createStackTrace(Throwable t) {
    StringBuffer stack = new StringBuffer("Message:" + t.getMessage() + "<br/>");
    StackTraceElement[] trace = t.getStackTrace();
    for (StackTraceElement element : trace) {
        stack.append(element.toString()).append("<br/>");
    }
    return stack.toString();
}
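A quick usage sketch for a method like this (the exception and frame shown are fabricated for illustration); since each frame ends in <br/>, the result can be dropped straight into an HTML error page:

    String html = createStackTrace(new IllegalStateException("boom"));
    // => "Message:boom<br/>org.example.Foo.bar(Foo.java:42)<br/>..."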
From source file:com.thoughtworks.go.server.service.support.ThreadInformationProvider.java
private Object asJSON(StackTraceElement[] stackTrace) {
    ArrayList<String> strings = new ArrayList<>();
    for (StackTraceElement o : stackTrace) {
        strings.add(o.toString());
    }
    return strings;
}
From source file:Data.c_PriceDB.java
public boolean loadPricesDB(ActionListener listener) {
    boolean success = true;
    try {
        FileUtils.copyURLToFile(new URL("http://www.magictraders.com/pricelists/current-magic-excel.txt"),
                new File(PRICES_FILE));
        listener.actionPerformed(new ActionEvent(this, Action.ACTION_FILE_LOAD_DONE, ""));
        success = updatePrices(listener, PRICES_FILE);
    } catch (Exception ex) {
        for (StackTraceElement elem : ex.getStackTrace()) {
            System.err.print(elem.toString() + "\n");
        }
        success = false;
    }
    return success;
}
From source file:com.cloud.utils.log.CglibThrowableRenderer.java
/**
 * This method adds the stack trace retrieved from {@link Throwable#getStackTrace()}
 * to the given list. The maxNumberOfStack attribute indicates the number of frames
 * that will be added; if that value is 0, all of the frames will be added, otherwise
 * the output is limited to that number.
 *
 * @param th the throwable whose stack trace is rendered
 * @param lines the list the rendered frames are appended to
 * @param maxNumberOfStack the maximum number of frames to add (0 means all)
 */
private void addStackTraceToList(Throwable th, List<String> lines, int maxNumberOfStack) {
    StackTraceElement[] elements = th.getStackTrace();
    if (maxNumberOfStack == 0 || maxNumberOfStack > elements.length) {
        maxNumberOfStack = elements.length;
    }
    for (int i = 0; i < maxNumberOfStack; i++) {
        StackTraceElement element = elements[i];
        // Skip synthetic CGLIB proxy frames; they add noise without information.
        if (StringUtils.contains(element.getClassName(), "net.sf.cglib.proxy")) {
            continue;
        }
        lines.add("\tat " + element.toString());
    }
    if (maxNumberOfStack < elements.length) {
        lines.add("\t... " + (elements.length - maxNumberOfStack) + " more");
    }
}
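The "\tat " prefix reproduces the line format printStackTrace itself uses, so the filtered output still reads like a normal JVM trace. A minimal standalone sketch of the same filtering idea (the package filter string is kept from the example; everything else is illustrative):

    import java.util.ArrayList;
    import java.util.List;

    public class TraceFilter {
        public static void main(String[] args) {
            List<String> lines = new ArrayList<>();
            StackTraceElement[] elements = new Throwable().getStackTrace();
            for (StackTraceElement element : elements) {
                if (element.getClassName().contains("net.sf.cglib.proxy")) {
                    continue; // drop proxy frames
                }
                lines.add("\tat " + element.toString());
            }
            lines.forEach(System.out::println);
        }
    }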
From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaJob.java
public Double call() throws NectarException { double value = 0; JobControl jobControl = new JobControl("sigmajob"); try {/*w ww .ja va2 s.c o m*/ job = new Job(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } job.setJarByClass(SigmaJob.class); log.info("Sigma Job initialized"); log.warn("Sigma job: Processing...Do not terminate/close"); log.debug("Sigma job: Mapping process started"); try { ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class, NullWritable.class, Text.class, job.getConfiguration()); ChainMapper.addMapper(job, SigmaMapper.class, NullWritable.class, Text.class, Text.class, DoubleWritable.class, job.getConfiguration()); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } job.getConfiguration().set("fields.spec", "" + column); job.setReducerClass(DoubleSumReducer.class); try { FileInputFormat.addInputPath(job, new Path(inputFilePath)); fs = FileSystem.get(job.getConfiguration()); if (!fs.exists(new Path(inputFilePath))) { throw new NectarException("Exception occured:File " + inputFilePath + " not found "); } } catch (Exception e2) { // TODO Auto-generated catch block String trace = new String(); log.error(e2.toString()); for (StackTraceElement s : e2.getStackTrace()) { trace += "\n\t at " + s.toString(); } log.debug(trace); log.debug("Sigma Job terminated abruptly\n"); throw new NectarException(); } FileOutputFormat.setOutputPath(job, new Path(outputFilePath)); job.setMapOutputValueClass(DoubleWritable.class); job.setMapOutputKeyClass(Text.class); job.setInputFormatClass(TextInputFormat.class); log.debug("Sigma job: Mapping process completed"); log.debug("Sigma job: Reducing process started"); try { controlledJob = new ControlledJob(job.getConfiguration()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } jobControl.addJob(controlledJob); Thread thread = new Thread(jobControl); thread.start(); while (!jobControl.allFinished()) { try { Thread.sleep(10000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } try { FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000")); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in)); String valueLine = bufferedReader.readLine(); String[] fields = valueLine.split("\t"); value = Double.parseDouble(fields[1]); bufferedReader.close(); in.close(); } catch (IOException e) { // TODO Auto-generated catch block log.error("Exception occured: Output file cannot be read."); log.debug(e.getMessage()); log.debug("Sigma Job terminated abruptly\n"); throw new NectarException(); } log.debug("Sigma job: Reducing process completed"); log.info("Sigma Job completed\n"); return value; }
From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaSqJob.java
public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.setJarByClass(SigmaSqJob.class);
    log.info("Sigma square Job initialized");
    log.warn("Sigma square job: Processing...Do not terminate/close");
    log.debug("Sigma square job: Mapping process started");
    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaSqMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: File " + inputFilePath + " not found");
        }
    } catch (Exception e) {
        String trace = "";
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma square job: Mapping process completed");
    log.debug("Sigma square job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occurred: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma square job: Reducing process completed");
    log.info("Sigma square Job completed\n");
    return value;
}
From source file:org.fenixedu.treasury.services.payments.sibs.SIBSPaymentsImporter.java
protected String getMessage(Exception ex) {
    String message = ex.getMessage() == null ? ex.getClass().getSimpleName() : ex.getMessage();
    message += "\n";
    for (StackTraceElement el : ex.getStackTrace()) {
        message = message + el.toString() + "\n";
    }
    return message;
}
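As a design note, concatenating with + in a loop copies the whole string on every pass. A behaviorally identical StringBuilder variant of the same method (a sketch, not the project's code) avoids that:

    protected String getMessage(Exception ex) {
        StringBuilder message = new StringBuilder(
                ex.getMessage() == null ? ex.getClass().getSimpleName() : ex.getMessage());
        message.append('\n');
        for (StackTraceElement el : ex.getStackTrace()) {
            message.append(el.toString()).append('\n');
        }
        return message.toString();
    }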
From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.MeanJob.java
public Double call() throws NectarException { double value = 0; JobControl jobControl = new JobControl("mean job"); try {// w ww . j ava2s . c o m job = new Job(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } job.setJarByClass(MeanJob.class); log.info("Mean Job initialized"); log.warn("Mean job: Processing...Do not terminate/close"); log.debug("Mean job: Mapping process started"); try { ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class, NullWritable.class, Text.class, job.getConfiguration()); ChainMapper.addMapper(job, MeanMapper.class, NullWritable.class, Text.class, Text.class, DoubleWritable.class, job.getConfiguration()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } job.getConfiguration().set("fields.spec", "" + column); job.getConfiguration().setInt("n", n); job.setReducerClass(DoubleSumReducer.class); try { FileInputFormat.addInputPath(job, new Path(inputFilePath)); fs = FileSystem.get(job.getConfiguration()); if (!fs.exists(new Path(inputFilePath))) { throw new NectarException("Exception occured:File " + inputFilePath + " not found "); } } catch (Exception e) { // TODO Auto-generated catch block String trace = new String(); log.error(e.toString()); for (StackTraceElement s : e.getStackTrace()) { trace += "\n\t at " + s.toString(); } log.debug(trace); log.debug("Mean Job terminated abruptly\n"); throw new NectarException(); } FileOutputFormat.setOutputPath(job, new Path(outputFilePath)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(DoubleWritable.class); job.setInputFormatClass(TextInputFormat.class); log.debug("Mean job: Mapping process completed"); log.debug("Mean job: Reducing process started"); try { controlledJob = new ControlledJob(job.getConfiguration()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } jobControl.addJob(controlledJob); Thread thread = new Thread(jobControl); thread.start(); while (!jobControl.allFinished()) { try { Thread.sleep(10000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } jobControl.stop(); try { FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000")); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in)); String valueLine = bufferedReader.readLine(); String[] fields = valueLine.split("\t"); value = Double.parseDouble(fields[1]); bufferedReader.close(); in.close(); } catch (IOException e) { log.error("Exception occured: Output file cannot be read."); log.debug(e.getMessage()); log.debug("Mean Job terminated abruptly\n"); throw new NectarException(); } log.debug("Mean job: Reducing process completed"); log.info("Mean Job completed\n"); return value; }