Example usage for java.lang StackTraceElement toString

Introduction

On this page you can find usage examples for java.lang.StackTraceElement.toString(), collected from open source projects.

Prototype

public String toString() 

Document

Returns a string representation of this stack trace element.
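
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed) that prints the current thread's stack trace, one element per line:

public class StackTraceElementToStringExample {
    public static void main(String[] args) {
        // Capture the current thread's stack trace and print each frame.
        // Each frame typically renders as "declaringClass.methodName(FileName:lineNumber)";
        // on newer JDKs a class loader and module prefix may also appear.
        for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
            System.out.println(element.toString());
        }
    }
}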

Usage

From source file:org.apache.flink.client.CliFrontend.java

/**
 * Displays an exception message.
 * 
 * @param t The exception to display.
 * @return The return code for the process.
 */
private int handleError(Throwable t) {
    LOG.error("Error while running the command.", t);

    System.err.println();
    System.err.println("------------------------------------------------------------");
    System.err.println(" The program finished with the following exception:");
    System.err.println();

    if (t.getCause() instanceof InvalidProgramException) {
        System.err.println(t.getCause().getMessage());
        StackTraceElement[] trace = t.getCause().getStackTrace();
        for (StackTraceElement ele : trace) {
            System.err.println("\t" + ele.toString());
            if (ele.getMethodName().equals("main")) {
                break;
            }
        }
    } else {
        t.printStackTrace();
    }
    return 1;
}

From source file:com.sonicle.webtop.mail.MailManager.java

public void addExternalAccount(ExternalAccount account) throws WTException {
    Connection connection = null;
    ExternalAccountDAO dao = ExternalAccountDAO.getInstance();

    try {
        OExternalAccount externalAccount = mapToOExternalAccount().apply(account);
        connection = WT.getConnection(SERVICE_ID);
        externalAccount.setExternalAccountId(dao.getSequence(connection).intValue());

        UserProfileId userProfileId = getTargetProfileId();
        externalAccount.setDomainId(userProfileId.getDomainId());
        externalAccount.setUserId(userProfileId.getUserId());
        dao.insert(connection, externalAccount);

    } catch (SQLException | DAOException ex) {
        StackTraceElement[] els = ex.getStackTrace();
        for (StackTraceElement st : els)
            System.out.println(st.toString());
        System.out.println(ex.getCause());
        System.out.println(ex.getMessage());
        throw new WTException(ex, "DB error");
    } finally {
        DbUtils.closeQuietly(connection);
    }
}

From source file:com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator.java

public void threadDump(BundleWriter writer) throws IOException {
    writer.markStartOfFile("runtime/threads.txt");

    ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
    ThreadInfo[] threads = threadMXBean.dumpAllThreads(true, true);

    // Sadly we can't easily do info.toString() as the implementation is hardcoded to cut the stack trace to only
    // 8 items, which does not serve our purpose well. Hence we have a custom implementation that prints the entire
    // stack trace for all threads.
    for (ThreadInfo info : threads) {
        StringBuilder sb = new StringBuilder(
                "\"" + info.getThreadName() + "\"" + " Id=" + info.getThreadId() + " " + info.getThreadState());
        if (info.getLockName() != null) {
            sb.append(" on " + info.getLockName());
        }
        if (info.getLockOwnerName() != null) {
            sb.append(" owned by \"" + info.getLockOwnerName() + "\" Id=" + info.getLockOwnerId());
        }
        if (info.isSuspended()) {
            sb.append(" (suspended)");
        }
        if (info.isInNative()) {
            sb.append(" (in native)");
        }
        sb.append('\n');
        int i = 0;
        for (StackTraceElement ste : info.getStackTrace()) {
            if (i == 0 && info.getLockInfo() != null) {
                Thread.State ts = info.getThreadState();
                switch (ts) {
                case BLOCKED:
                    sb.append("\t-  blocked on " + info.getLockInfo());
                    sb.append('\n');
                    break;
                case WAITING:
                    sb.append("\t-  waiting on " + info.getLockInfo());
                    sb.append('\n');
                    break;
                case TIMED_WAITING:
                    sb.append("\t-  waiting on " + info.getLockInfo());
                    sb.append('\n');
                    break;
                default:
                }
            }
            sb.append("\tat " + ste.toString());
            sb.append('\n');

            i++;

            for (MonitorInfo mi : info.getLockedMonitors()) {
                if (mi.getLockedStackDepth() == i) {
                    sb.append("\t-  locked " + mi);
                    sb.append('\n');
                }
            }
        }

        LockInfo[] locks = info.getLockedSynchronizers();
        if (locks.length > 0) {
            sb.append("\n\tNumber of locked synchronizers = " + locks.length);
            sb.append('\n');
            for (LockInfo li : locks) {
                sb.append("\t- " + li);
                sb.append('\n');
            }
        }
        sb.append('\n');

        writer.write(sb.toString());
    }

    writer.markEndOfFile();
}

From source file:org.sakaiproject.sdata.tool.JCRHandler.java

/**
 * Perform a mime multipart upload into the JCR repository based on a location specified
 * by the rp parameter. The parts of the multipart upload are relative to the current
 * request path.
 *
 * @param request
 *          the request object of the current request.
 * @param response
 *          the response object of the current request
 * @param rp
 *          the resource definition for the current request
 * @throws ServletException
 * @throws IOException
 */
private void doMumtipartUpload(HttpServletRequest request, HttpServletResponse response, ResourceDefinition rp)
        throws ServletException, IOException {
    try {
        try {
            Node n = jcrNodeFactory.createFolder(rp.getRepositoryPath());
            if (n == null) {
                response.reset();
                response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                        "Unable to uplaod to location " + rp.getRepositoryPath());
                return;
            }
        } catch (Exception ex) {
            sendError(request, response, ex);
            snoopRequest(request);
            LOG.error("Failed  TO service Request ", ex);
            return;
        }

        // Check that we have a file upload request

        // Create a new file upload handler
        ServletFileUpload upload = new ServletFileUpload();
        List<String> errors = new ArrayList<String>();

        // Parse the request
        FileItemIterator iter = upload.getItemIterator(request);
        Map<String, Object> responseMap = new HashMap<String, Object>();
        Map<String, Object> uploads = new HashMap<String, Object>();
        Map<String, List<String>> values = new HashMap<String, List<String>>();
        int uploadNumber = 0;
        while (iter.hasNext()) {
            FileItemStream item = iter.next();
            LOG.debug("Got Upload through Uploads");
            String name = item.getName();
            String fieldName = item.getFieldName();
            LOG.info("    Name is " + name + " field Name " + fieldName);
            for (String headerName : item.getHeaderNames()) {
                LOG.info("Header " + headerName + " is " + item.getHeader(headerName));
            }
            InputStream stream = item.openStream();
            if (!item.isFormField()) {
                try {
                    if (name != null && name.trim().length() > 0) {

                        List<String> realNames = values.get(REAL_UPLOAD_NAME);
                        String finalName = name;
                        if (realNames != null && realNames.size() > uploadNumber) {
                            finalName = realNames.get(uploadNumber);
                        }

                        String path = rp.convertToAbsoluteRepositoryPath(finalName);
                        // pooled uploads never overwrite.
                        List<String> pooled = values.get(POOLED);
                        if (pooled != null && pooled.size() > 0 && "1".equals(pooled.get(0))) {
                            path = rp.convertToAbsoluteRepositoryPath(PathUtils.getPoolPrefix(finalName));
                            int i = 0;
                            String basePath = path;
                            try {
                                while (true) {
                                    Node n = jcrNodeFactory.getNode(path);
                                    if (n == null) {
                                        break;
                                    }
                                    int lastStop = basePath.lastIndexOf('.');
                                    path = basePath.substring(0, lastStop) + "_" + i
                                            + basePath.substring(lastStop);
                                    i++;
                                }
                            } catch (JCRNodeFactoryServiceException ex) {
                                // the path does not exist which is good.
                            }
                        }

                        String mimeType = ContentTypes.getContentType(finalName, item.getContentType());
                        Node target = jcrNodeFactory.createFile(path, mimeType);
                        GregorianCalendar lastModified = new GregorianCalendar();
                        lastModified.setTime(new Date());
                        long size = saveStream(target, stream, mimeType, "UTF-8", lastModified);
                        Map<String, Object> uploadMap = new HashMap<String, Object>();
                        if (size > Integer.MAX_VALUE) {
                            uploadMap.put("contentLength", String.valueOf(size));
                        } else {
                            uploadMap.put("contentLength", (int) size);
                        }
                        uploadMap.put("name", finalName);
                        uploadMap.put("url", rp.convertToExternalPath(path));
                        uploadMap.put("mimeType", mimeType);
                        uploadMap.put("lastModified", lastModified.getTime());
                        uploadMap.put("status", "ok");

                        uploads.put(fieldName, uploadMap);
                    }
                } catch (Exception ex) {
                    LOG.error("Failed to Upload Content", ex);
                    Map<String, Object> uploadMap = new HashMap<String, Object>();
                    uploadMap.put("mimeType", "text/plain");
                    uploadMap.put("encoding", "UTF-8");
                    uploadMap.put("contentLength", -1);
                    uploadMap.put("lastModified", 0);
                    uploadMap.put("status", "Failed");
                    uploadMap.put("cause", ex.getMessage());
                    List<String> stackTrace = new ArrayList<String>();
                    for (StackTraceElement ste : ex.getStackTrace()) {
                        stackTrace.add(ste.toString());
                    }
                    uploadMap.put("stacktrace", stackTrace);
                    uploads.put(fieldName, uploadMap);
                    uploadMap = null;

                }

            } else {
                String value = Streams.asString(stream);
                List<String> valueList = values.get(name);
                if (valueList == null) {
                    valueList = new ArrayList<String>();
                    values.put(name, valueList);

                }
                valueList.add(value);
            }
        }

        responseMap.put("success", true);
        responseMap.put("errors", errors.toArray(new String[1]));
        responseMap.put("uploads", uploads);
        sendMap(request, response, responseMap);
        LOG.info("Response Complete Saved to " + rp.getRepositoryPath());
    } catch (Throwable ex) {
        LOG.error("Failed  TO service Request ", ex);
        sendError(request, response, ex);
        return;
    }
}

From source file:com.evolveum.icf.dummy.resource.DummyResource.java

private void traceOperation(String opName, long counter) {
    LOGGER.info("MONITOR dummy '{}' {} ({})", instanceName, opName, counter);
    if (LOGGER.isDebugEnabled()) {
        StackTraceElement[] fullStack = Thread.currentThread().getStackTrace();
        String immediateClass = null;
        String immediateMethod = null;
        StringBuilder sb = new StringBuilder();
        for (StackTraceElement stackElement : fullStack) {
            if (stackElement.getClassName().equals(DummyResource.class.getName())
                    || stackElement.getClassName().equals(Thread.class.getName())) {
                // skip our own calls
                continue;
            }
            if (immediateClass == null) {
                immediateClass = stackElement.getClassName();
                immediateMethod = stackElement.getMethodName();
            }
            sb.append(stackElement.toString());
            sb.append("\n");
        }
        LOGGER.debug("MONITOR dummy '{}' {} ({}): {} {}",
                new Object[] { instanceName, opName, counter, immediateClass, immediateMethod });
        LOGGER.trace("MONITOR dummy '{}' {} ({}):\n{}", new Object[] { instanceName, opName, counter, sb });
    }
}

From source file:tools.xor.service.AggregateManager.java

private String getStackTrace(int numStackElements) {
    Exception e = new Exception();
    StringBuilder sb = new StringBuilder();

    int skip = 2; // skip first 2
    for (StackTraceElement element : e.getStackTrace()) {
        if (skip-- > 0) {
            continue;
        } else if (skip + numStackElements == 0) {
            break;
        }
        sb.append(element.toString());
        sb.append("\r\n");
    }
    return sb.toString();
}

From source file:dev.ukanth.ufirewall.Api.java

/**
 * Runs a script as root (multiple commands separated by "\n")
 * @param ctx mandatory context
 * @param script the script to be executed
 * @param res the script output response (stdout + stderr)
 * @return the script exit code
 * @throws IOException on any error executing the script, or writing it to disk
 */
public static int runScriptAsRoot(Context ctx, List<String> script, StringBuilder res) throws IOException {
    int returnCode = -1;

    if ((Looper.myLooper() != null) && (Looper.myLooper() == Looper.getMainLooper())) {
        Log.e(TAG, "runScriptAsRoot should not be called from the main thread\nCall Trace:\n");
        for (StackTraceElement e : new Throwable().getStackTrace()) {
            Log.e(TAG, e.toString());
        }
    }

    try {
        returnCode = new RunCommand().execute(script, res, ctx).get();
    } catch (RejectedExecutionException r) {
        Log.e(TAG, "runScript failed: " + r.getLocalizedMessage());
    } catch (InterruptedException e) {
        Log.e(TAG, "Caught InterruptedException");
    } catch (ExecutionException e) {
        Log.e(TAG, "runScript failed: " + e.getLocalizedMessage());
    } catch (Exception e) {
        Log.e(TAG, "runScript failed: " + e.getLocalizedMessage());
    }

    return returnCode;
}

From source file:com.gimranov.zandy.app.task.APIRequest.java

/**
 * Issues the specified request, calling its specified handler as appropriate
 *
 * This should not be run from a UI thread
 *
 * @throws APIException
 */
public void issue(Database db, ServerCredentials cred) throws APIException {

    URI uri;

    // Add the API key, if missing and we have it
    if (!query.contains("key=") && key != null) {
        String suffix = (query.contains("?")) ? "&key=" + key : "?key=" + key;
        query = query + suffix;
    }

    // Force lower-case
    method = method.toLowerCase();

    Log.i(TAG, "Request " + method + ": " + query);

    try {
        uri = new URI(query);
    } catch (URISyntaxException e1) {
        throw new APIException(APIException.INVALID_URI, "Invalid URI: " + query, this);
    }

    HttpClient client = new DefaultHttpClient();
    // The default implementation includes an Expect: header, which
    // confuses the Zotero servers.
    client.getParams().setParameter("http.protocol.expect-continue", false);
    // We also need to send our data nice and raw.
    client.getParams().setParameter("http.protocol.content-charset", "UTF-8");

    HttpGet get = new HttpGet(uri);
    HttpPost post = new HttpPost(uri);
    HttpPut put = new HttpPut(uri);
    HttpDelete delete = new HttpDelete(uri);

    for (HttpRequest request : Arrays.asList(get, post, put, delete)) {
        request.setHeader("Zotero-API-Version", "1");
    }

    // There are several shared initialization routines for POST and PUT
    if ("post".equals(method) || "put".equals(method)) {
        if (ifMatch != null) {
            post.setHeader("If-Match", ifMatch);
            put.setHeader("If-Match", ifMatch);
        }
        if (contentType != null) {
            post.setHeader("Content-Type", contentType);
            put.setHeader("Content-Type", contentType);
        }
        if (body != null) {
            Log.d(TAG, "Request body: " + body);
            // Force the encoding to UTF-8
            StringEntity entity;
            try {
                entity = new StringEntity(body, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                throw new APIException(APIException.INVALID_UUID,
                        "UnsupportedEncodingException. This shouldn't "
                                + "be possible-- UTF-8 is certainly supported",
                        this);
            }
            post.setEntity(entity);
            put.setEntity(entity);
        }
    }

    if ("get".equals(method)) {
        if (contentType != null) {
            get.setHeader("Content-Type", contentType);
        }
    }

    /* For requests that return Atom feeds or entries (XML):
     *       ITEMS_ALL            ]
     *       ITEMS_FOR_COLLECTION   ]- Except format=keys
     *       ITEMS_CHILDREN         ]
     * 
     *       ITEM_BY_KEY
     *       COLLECTIONS_ALL
     *       ITEM_NEW
     *       ITEM_UPDATE
     *       ITEM_ATTACHMENT_NEW
     *       ITEM_ATTACHMENT_UPDATE
     */
    if ("xml".equals(disposition)) {
        XMLResponseParser parse = new XMLResponseParser(this);
        // These types will always have a temporary key that we've
        // been using locally, and which should be replaced by the
        // incoming item key.
        if (type == ITEM_NEW || type == ITEM_ATTACHMENT_NEW) {
            parse.update(updateType, updateKey);
        }

        try {
            HttpResponse hr;
            if ("post".equals(method)) {
                hr = client.execute(post);
            } else if ("put".equals(method)) {
                hr = client.execute(put);
            } else {
                // We fall back on GET here, but there really
                // shouldn't be anything else, so we throw in that case
                // for good measure
                if (!"get".equals(method)) {
                    throw new APIException(APIException.INVALID_METHOD, "Unexpected method: " + method, this);
                }
                hr = client.execute(get);
            }

            // Record the response code
            status = hr.getStatusLine().getStatusCode();
            Log.d(TAG, status + " : " + hr.getStatusLine().getReasonPhrase());

            if (status < 400) {
                HttpEntity he = hr.getEntity();
                InputStream in = he.getContent();
                parse.setInputStream(in);
                // Entry mode if the request is an update (PUT) or if it is a request
                // for a single item by key (ITEM_BY_KEY)
                int mode = ("put".equals(method) || type == APIRequest.ITEM_BY_KEY)
                        ? XMLResponseParser.MODE_ENTRY
                        : XMLResponseParser.MODE_FEED;
                try {
                    parse.parse(mode, uri.toString(), db);
                } catch (RuntimeException e) {
                    throw new RuntimeException("Parser threw exception on request: " + method + " " + query, e);
                }
            } else {
                ByteArrayOutputStream ostream = new ByteArrayOutputStream();
                hr.getEntity().writeTo(ostream);
                Log.e(TAG, "Error Body: " + ostream.toString());
                Log.e(TAG, "Request Body:" + body);

                if (status == 412) {
                    // This is: "Precondition Failed", meaning that we provided
                    // the wrong etag to update the item. That should mean that
                    // there is a conflict between what we're sending (PUT) and
                    // the server. We mark that ourselves and save the request
                    // to the database, and also notify our handler.
                    getHandler().onError(this, APIRequest.HTTP_ERROR_CONFLICT);
                } else {
                    Log.e(TAG, "Response status " + status + " : " + ostream.toString());
                    getHandler().onError(this, APIRequest.HTTP_ERROR_UNSPECIFIED);
                }
                status = getHttpStatus() + REQ_FAILING;
                recordAttempt(db);

                // I'm not sure whether we should throw here
                throw new APIException(APIException.HTTP_ERROR, ostream.toString(), this);
            }
        } catch (Exception e) {
            StringBuilder sb = new StringBuilder();
            for (StackTraceElement el : e.getStackTrace()) {
                sb.append(el.toString() + "\n");
            }
            recordAttempt(db);
            throw new APIException(APIException.HTTP_ERROR, "An IOException was thrown: " + sb.toString(),
                    this);
        }
    } // end if ("xml".equals(disposition)) {..}
    /* For requests that return non-XML data:
     *       ITEMS_ALL            ]
     *       ITEMS_FOR_COLLECTION   ]- For format=keys
     *       ITEMS_CHILDREN         ]
     * 
     * No server response:
     *       ITEM_DELETE
     *       ITEM_MEMBERSHIP_ADD
     *       ITEM_MEMBERSHIP_REMOVE
     *       ITEM_ATTACHMENT_DELETE
     * 
     * Currently not supported; return JSON:
     *       ITEM_FIELDS
     *       CREATOR_TYPES
     *       ITEM_FIELDS_L10N
     *       CREATOR_TYPES_L10N
     * 
     * These ones use BasicResponseHandler, which gives us
     * the response as a basic string. This is only appropriate
     * for smaller responses, since it means we have to wait until
     * the entire response is received before parsing it, so we
     * don't use it for the XML responses.
     * 
     * The disposition here is "none" or "raw".
     * 
     * The JSON-returning requests, such as ITEM_FIELDS, are not currently
     * supported; they should have a disposition of their own.
     */
    else {
        BasicResponseHandler brh = new BasicResponseHandler();
        String resp;

        try {
            if ("post".equals(method)) {
                resp = client.execute(post, brh);
            } else if ("put".equals(method)) {
                resp = client.execute(put, brh);
            } else if ("delete".equals(method)) {
                resp = client.execute(delete, brh);
            } else {
                // We fall back on GET here, but there really
                // shouldn't be anything else, so we throw in that case
                // for good measure
                if (!"get".equals(method)) {
                    throw new APIException(APIException.INVALID_METHOD, "Unexpected method: " + method, this);
                }
                resp = client.execute(get, brh);
            }
        } catch (IOException e) {
            StringBuilder sb = new StringBuilder();
            for (StackTraceElement el : e.getStackTrace()) {
                sb.append(el.toString() + "\n");
            }
            recordAttempt(db);
            throw new APIException(APIException.HTTP_ERROR, "An IOException was thrown: " + sb.toString(),
                    this);
        }

        if ("raw".equals(disposition)) {
            /* 
             * The output should be a newline-delimited set of alphanumeric
             * keys.
             */

            String[] keys = resp.split("\n");

            ArrayList<String> missing = new ArrayList<String>();

            if (type == ITEMS_ALL || type == ITEMS_FOR_COLLECTION) {

                // Try to get a parent collection
                // Our query looks like this:
                // /users/5770/collections/2AJUSIU9/items
                int colloc = query.indexOf("/collections/");
                int itemloc = query.indexOf("/items");
                // The string "/collections/" is thirteen characters long
                ItemCollection coll = ItemCollection.load(query.substring(colloc + 13, itemloc), db);

                if (coll != null) {
                    coll.loadChildren(db);

                    // If this is a collection's key listing, we first look
                    // for any synced keys we have that aren't in the list
                    ArrayList<String> keyAL = new ArrayList<String>(Arrays.asList(keys));
                    ArrayList<Item> notThere = coll.notInKeys(keyAL);
                    // We should then remove those memberships
                    for (Item i : notThere) {
                        coll.remove(i, true, db);
                    }
                }

                ArrayList<Item> recd = new ArrayList<Item>();
                for (int j = 0; j < keys.length; j++) {
                    Item got = Item.load(keys[j], db);
                    if (got == null) {
                        missing.add(keys[j]);
                    } else {
                        // We can update the collection membership immediately
                        if (coll != null)
                            coll.add(got, true, db);
                        recd.add(got);
                    }
                }

                if (coll != null) {
                    coll.saveChildren(db);
                    coll.save(db);
                }

                Log.d(TAG, "Received " + keys.length + " keys, " + missing.size() + " missing ones");
                Log.d(TAG, "Have " + (double) recd.size() / keys.length + " of list");

                if (recd.size() == keys.length) {
                    Log.d(TAG, "No new items");
                    succeeded(db);
                } else if ((double) recd.size() / keys.length < REREQUEST_CUTOFF) {
                    Log.d(TAG, "Requesting full list");
                    APIRequest mReq;
                    if (type == ITEMS_FOR_COLLECTION) {
                        mReq = fetchItems(coll, false, cred);
                    } else {
                        mReq = fetchItems(false, cred);
                    }

                    mReq.status = REQ_NEW;
                    mReq.save(db);
                } else {
                    Log.d(TAG, "Requesting " + missing.size() + " items one by one");
                    APIRequest mReq;
                    for (String key : missing) {
                        // Queue request for the missing key
                        mReq = fetchItem(key, cred);
                        mReq.status = REQ_NEW;
                        mReq.save(db);
                    }
                    // Queue request for the collection again, by key
                    // XXX This is not the best way to make sure these
                    // items are put in the correct collection.
                    if (type == ITEMS_FOR_COLLECTION) {
                        fetchItems(coll, true, cred).save(db);
                    }
                }
            } else if (type == ITEMS_CHILDREN) {
                // Try to get a parent item
                // Our query looks like this:
                // /users/5770/items/2AJUSIU9/children
                int itemloc = query.indexOf("/items/");
                int childloc = query.indexOf("/children");
                // The string "/items/" is seven characters long
                Item item = Item.load(query.substring(itemloc + 7, childloc), db);

                ArrayList<Attachment> recd = new ArrayList<Attachment>();
                for (int j = 0; j < keys.length; j++) {
                    Attachment got = Attachment.load(keys[j], db);
                    if (got == null)
                        missing.add(keys[j]);
                    else
                        recd.add(got);
                }

                if ((double) recd.size() / keys.length < REREQUEST_CUTOFF) {
                    APIRequest mReq;
                    mReq = cred.prep(children(item));
                    mReq.status = REQ_NEW;
                    mReq.save(db);
                } else {
                    APIRequest mReq;
                    for (String key : missing) {
                        // Queue request for the missing key
                        mReq = fetchItem(key, cred);
                        mReq.status = REQ_NEW;
                        mReq.save(db);
                    }
                }
            }
        } else if ("json".equals(disposition)) {
            // TODO
        } else {
            /* Here, disposition should be "none" */
            // Nothing to be done.
        }

        getHandler().onComplete(this);
    }
}

From source file:com.opengamma.masterdb.batch.DbBatchWriter.java

@SuppressWarnings("unchecked")
public synchronized void addJobResultsInTransaction(TransactionStatus transactionStatus, ObjectId runId,
        ViewComputationResultModel resultModel) {
    ArgumentChecker.notNull(runId, "runId");
    ArgumentChecker.notNull(resultModel, "resultModel");

    final long riskRunId = extractOid(runId);
    ArgumentChecker.notNull(riskRunId, "riskRunId");

    Map<ComputeFailureKey, ComputeFailure> computeFailureCache = _computeFailureCacheByRunId.get(riskRunId);
    Map<Pair<Long, Long>, StatusEntry> statusCache = _statusCacheByRunId.get(riskRunId);

    Map<ValueSpecification, BatchResultWriterFailure> errorCache = populateErrorCache(computeFailureCache,
            resultModel.getAllResults());

    RiskRun run = _riskRunsByIds.get(riskRunId);
    if (run.getSnapshotMode().equals(SnapshotMode.WRITE_THROUGH)) {
        addComputedValuesToMarketDataInTransaction(run.getMarketData().getObjectId(),
                resultModel.getAllMarketData());
    }

    for (String calcConfigName : resultModel.getCalculationConfigurationNames()) {
        ViewCalculationResultModel viewCalculationResultModel = resultModel
                .getCalculationResult(calcConfigName);

        final Set<ComputationTargetSpecification> successfulTargets = newHashSet();
        final Set<ComputationTargetSpecification> failedTargets = newHashSet();

        List<SqlParameterSource> targetProperties = newArrayList();
        List<SqlParameterSource> successes = newArrayList();
        List<SqlParameterSource> failures = newArrayList();
        List<SqlParameterSource> failureReasons = newArrayList();

        Instant evalInstant = Instant.now();

        long calcConfId = _calculationConfigurations.get(calcConfigName);

        for (final ComputationTargetSpecification targetSpec : viewCalculationResultModel.getAllTargets()) {
            final long computationTargetId = _computationTargets.get(targetSpec);
            boolean specFailures = false;
            for (final ComputedValueResult computedValue : viewCalculationResultModel
                    .getAllValues(targetSpec)) {
                ResultConverter<Object> resultConverter = null;
                if (!(computedValue.getValue() instanceof MissingValue)) {
                    try {
                        resultConverter = (ResultConverter<Object>) _resultConverterCache
                                .getConverter(computedValue.getValue());
                    } catch (IllegalArgumentException e) {
                        s_logger.info("No converter for value of type " + computedValue.getValue().getClass()
                                + " for " + computedValue.getSpecification());
                    }
                }

                final ValueSpecification specification = computedValue.getSpecification();
                if (!_riskValueSpecifications.containsKey(specification)) {
                    s_logger.error("Unexpected result specification " + specification
                            + ". Result cannot be written. Result value was " + computedValue.getValue());
                    continue;
                }
                final long valueSpecificationId = _riskValueSpecifications.get(specification);
                final long functionUniqueId = getFunctionUniqueIdInTransaction(
                        specification.getFunctionUniqueId()).getId();
                final long computeNodeId = getOrCreateComputeNode(computedValue.getComputeNodeId()).getId();

                if (resultConverter != null
                        && computedValue.getInvocationResult() == InvocationResult.SUCCESS) {
                    s_logger.debug("Writing value {} for value spec {}", computedValue.getValue(),
                            specification);
                    Map<String, Double> valueAsDoublesMap = resultConverter
                            .convert(computedValue.getSpecification().getValueName(), computedValue.getValue());
                    for (Map.Entry<String, Double> valueEntry : valueAsDoublesMap.entrySet()) {
                        final String valueName = valueEntry.getKey();
                        final Double doubleValue = ensureDatabasePrecision(valueEntry.getValue());
                        final long successId = nextId(RSK_SEQUENCE_NAME);
                        successes.add(getSuccessArgs(successId, riskRunId, evalInstant, calcConfId,
                                computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId,
                                valueName, doubleValue));
                    }
                } else {
                    s_logger.info("Writing failure for {} with invocation result {}, {} ",
                            newArray(computedValue.getSpecification(), computedValue.getInvocationResult(),
                                    computedValue.getAggregatedExecutionLog()));
                    specFailures = true;

                    final long failureId = nextId(RSK_SEQUENCE_NAME);
                    failures.add(getFailureArgs(failureId, riskRunId, evalInstant, calcConfId,
                            computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId,
                            specification.getValueName()));

                    BatchResultWriterFailure cachedFailure = errorCache.get(specification);
                    if (cachedFailure != null) {
                        for (long computeFailureId : cachedFailure.getComputeFailureIds()) {
                            ArgumentChecker.notNull(computeFailureId, "computeFailureId");
                            final long failureReasonId = nextId(RSK_SEQUENCE_NAME);
                            failureReasons
                                    .add(getFailureReasonArgs(failureReasonId, failureId, computeFailureId));
                        }
                    }
                }
            }
            StatusEntry.Status status = getStatus(statusCache, calcConfigName, targetSpec);
            if (specFailures || status == StatusEntry.Status.FAILURE) {
                successfulTargets.remove(targetSpec);
                failedTargets.add(targetSpec);
            } else {
                successfulTargets.add(targetSpec);
            }

            // storing target data
            ComputationTarget computationTarget = _computationTargetResolver.resolve(targetSpec,
                    VersionCorrection.LATEST);
            Object targetValue = computationTarget.getValue();
            if (targetValue instanceof Bean) {
                Bean bean = (Bean) targetValue;
                for (String propertyName : bean.propertyNames()) {
                    Property<Object> property = bean.property(propertyName);
                    final long targetPropertyId = nextId(RSK_SEQUENCE_NAME);
                    targetProperties.add(getTargetPropertyArgs(targetPropertyId, computationTargetId,
                            propertyName, property.get() == null ? "NULL" : property.get().toString()));
                }
            }
        }

        if (successes.isEmpty() && failures.isEmpty() && failureReasons.isEmpty() && successfulTargets.isEmpty()
                && failedTargets.isEmpty()) {
            s_logger.debug("Nothing to write to DB for {}", resultModel);
            return;
        }

        Object preSuccessSavepoint = transactionStatus.createSavepoint();
        try {
            getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskSuccess"),
                    successes.toArray(new DbMapSqlParameterSource[successes.size()]));
        } catch (Exception e) {
            s_logger.error("Failed to write successful calculations to batch database. Converting to failures.",
                    e);
            transactionStatus.rollbackToSavepoint(preSuccessSavepoint);
            if (!successes.isEmpty()) {
                String exceptionClass = e.getClass().getName();
                String exceptionMsg = e.getMessage();
                final StringBuilder buffer = new StringBuilder();
                for (StackTraceElement element : e.getStackTrace()) {
                    buffer.append(element.toString()).append("\n");
                }
                final String stackTrace = buffer.toString();
                for (SqlParameterSource success : successes) {
                    failures.add(convertSuccessToFailure(success));
                    final long failureId = getId(success);
                    final long functionId = getFunctionId(success);
                    ComputeFailureKey computeFailureKey = new ComputeFailureKey(String.valueOf(functionId),
                            exceptionClass, exceptionMsg, stackTrace);
                    ComputeFailure computeFailure = getComputeFailureFromDb(computeFailureCache,
                            computeFailureKey);
                    final long failureReasonId = nextId(RSK_SEQUENCE_NAME);
                    failureReasons
                            .add(getFailureReasonArgs(failureReasonId, failureId, computeFailure.getId()));
                }
                failedTargets.addAll(successfulTargets);
                successes.clear();
                successfulTargets.clear();
                targetProperties.clear();
            }
        }
        Object preTargetPropertiesFailureSavepoint = transactionStatus.createSavepoint();
        try {
            getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertTargetProperties"),
                    targetProperties.toArray(new DbMapSqlParameterSource[targetProperties.size()]));
        } catch (Exception e) {
            s_logger.error("Failed to write target properties to batch database", e);
            transactionStatus.rollbackToSavepoint(preTargetPropertiesFailureSavepoint);
        }
        Object preFailureSavepoint = transactionStatus.createSavepoint();
        try {
            getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailure"),
                    failures.toArray(new DbMapSqlParameterSource[failures.size()]));
            getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailureReason"),
                    failureReasons.toArray(new DbMapSqlParameterSource[failureReasons.size()]));
        } catch (Exception e) {
            s_logger.error("Failed to write failures to batch database", e);
            transactionStatus.rollbackToSavepoint(preFailureSavepoint);
        }

        updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.SUCCESS,
                successfulTargets);
        updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.FAILURE, failedTargets);
    }
}

From source file:org.evosuite.utils.GenericClass.java

/** {@inheritDoc} */
@Override
public String toString() {
    if (type == null) {
        LoggingUtils.getEvoLogger().info("Type is null for raw class {}", rawClass);
        for (StackTraceElement elem : Thread.currentThread().getStackTrace()) {
            LoggingUtils.getEvoLogger().info(elem.toString());
        }
        assert (false);
    }
    return type.toString();
}