Example usage for javax.servlet ServletOutputStream println

List of usage examples for javax.servlet ServletOutputStream println

Introduction

On this page you can find example usage for javax.servlet ServletOutputStream println.

Prototype


public void println(double d) throws IOException 

Document

Writes a double value to the client, followed by a carriage return-line feed (CRLF).
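
For orientation, here is a minimal sketch of the call pattern in a servlet. PrintlnDemoServlet is a hypothetical class written for this page, not one of the sources listed under Usage; the examples below mostly use the String and no-argument overloads, which also terminate each write with CRLF.

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet, shown only to illustrate ServletOutputStream.println.
public class PrintlnDemoServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/plain");
        ServletOutputStream out = response.getOutputStream();
        out.println(3.14d);   // double overload: writes "3.14" followed by CRLF
        out.println("done");  // String overload used by most examples below
        out.flush();
    }
}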

Usage

From source file:org.inbio.ait.web.ajax.controller.QueryController.java

/**
 * Write the XML to be parsed on the analysis view
 * Case: three parameters (1)
 * @param request
 * @param response
 * @param totalMatch
 * @param matchesByPolygon
 * @return
 * @throws java.lang.Exception
 */
private ModelAndView writeReponse1(HttpServletRequest request, HttpServletResponse response,
        List<Node> matchesByPolygon, List<Node> matchesByIndicator, Long totalMatches, Long totalPercentage)
        throws Exception {

    response.setCharacterEncoding("ISO-8859-1");
    response.setContentType("text/xml");
    ServletOutputStream out = response.getOutputStream();

    StringBuilder result = new StringBuilder();
    result.append("<?xml version='1.0' encoding='ISO-8859-1'?><response>");
    result.append("<type>1</type>");
    result.append("<total>" + totalMatches + "</total>");
    result.append("<totalp>" + totalPercentage + "</totalp>");
    for (Node mp : matchesByPolygon) {
        result.append("<bypolygon>");
        result.append("<abs>" + mp.getValue1() + "</abs>");
        result.append("<per>" + mp.getValue2() + "</per>");
        result.append("</bypolygon>");
    }
    for (Node mi : matchesByIndicator) {
        result.append("<byindicator>");
        result.append("<abs>" + mi.getValue1() + "</abs>");
        result.append("<per>" + mi.getValue2() + "</per>");
        result.append("</byindicator>");
    }
    result.append("</response>");

    out.println(result.toString());
    out.flush();
    out.close();

    return null;
}

From source file:org.ngrinder.perftest.controller.PerfTestController.java

/**
 * Show the given log for the perf test having the given id.
 *
 * @param user     user
 * @param id       test id
 * @param path     path in the log folder
 * @param response response
 */
@RequestMapping(value = "/{id}/show_log/**")
public void showLog(User user, @PathVariable("id") long id, @RemainedPath String path,
        HttpServletResponse response) {
    getOneWithPermissionCheck(user, id, false);
    File targetFile = perfTestService.getLogFile(id, path);
    response.reset();
    response.setContentType("text/plain");
    response.setCharacterEncoding("UTF-8");
    FileInputStream fileInputStream = null;
    try {
        fileInputStream = new FileInputStream(targetFile);
        ServletOutputStream outputStream = response.getOutputStream();
        if (FilenameUtils.isExtension(targetFile.getName(), "zip")) {
            // Limit log view to 1MB
            outputStream.println(" Only the last 1MB of a log shows.\n");
            outputStream
                    .println("==========================================================================\n\n");
            LogCompressUtils.decompress(fileInputStream, outputStream, 1 * 1024 * 1024); // 1 MB, matching the comment above
        } else {
            IOUtils.copy(fileInputStream, outputStream);
        }
    } catch (Exception e) {
        CoreLogger.LOGGER.error("Error while processing log. {}", targetFile, e);
    } finally {
        IOUtils.closeQuietly(fileInputStream);
    }
}

From source file:org.tap4j.plugin.TapResult.java

public void doDownloadAttachment(StaplerRequest request, StaplerResponse response) {
    String f = request.getParameter("f");
    String key = request.getParameter("key");
    try {
        FilePath tapDir = new FilePath(new FilePath(new File(build.getRootDir(), Constants.TAP_DIR_NAME)), f);
        ServletOutputStream sos = response.getOutputStream();
        if (tapDir.exists()) {
            String tapStream = tapDir.readToString();
            TapConsumer consumer = TapConsumerFactory.makeTap13YamlConsumer();
            TestSet ts = consumer.load(tapStream);

            TapAttachment attachment = getAttachment(ts, key);
            if (attachment != null) {
                response.setContentType("application/force-download");
                //response.setContentLength((int)tapDir.length());
                response.setContentLength(attachment.getSize());
                response.setHeader("Content-Transfer-Encoding", "binary");
                response.setHeader("Content-Disposition",
                        "attachment; filename=\"" + attachment.getFileName() + "\"");//fileName);

                sos.write(attachment.getContent());
                sos.print('\n');
            } else {
                sos.println("Couldn't locate attachment in YAMLish: " + f);
            }
        } else {
            sos.println("Couldn't read FilePath.");
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    } finally {
    }
}

From source file:org.LexGrid.LexBIG.caCore.web.util.LexEVSHTTPUtils.java

/**
 * Prints results on screen
 * @param resultList
 * @param response
 * @throws IOException
 * @throws ServletException
 */
public void printResults(HttpServletResponse response) throws IOException, ServletException {

    response.setContentType("text/html");
    ServletOutputStream out = response.getOutputStream();
    out.println("<br><font color=purple><b>");
    out.println("<b>" + results.size() + " records found. </b><br><hr>");
    int recordNum = 1;
    out.println();
    out.println("<font size=4 color=black> Criteria : " + this.getCriteria() + "</font>");
    if (results.size() > 0)
        out.println("<br><font size=4 color=black> Result Class name: " + results.get(0).getClass().getName()
                + "</font>");
    else
        out.println("<br><font size=4 color=black>No records found </font>");
    out.println("<br><hr><br>");
    out.println("<TABLE BORDER=\"2\"  style=\"table-layout:AUTO\" valign=\"top\">");
    try {

        for (int i = 0; i < results.size(); i++) {

            printRecord(results.get(i), servletName, out, recordNum);

            recordNum++;
        }
    } catch (Exception ex) {
        log.error("Exception: ", ex);
        throw new IOException(ex.getMessage());
    }
}

From source file:org.openxdata.server.servlet.DataImportServlet.java

@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    ServletOutputStream out = response.getOutputStream();
    try {
        // authenticate user
        User user = getUser(request.getHeader("Authorization"));
        if (user != null) {
            log.info("authenticated user:");
            // check msisdn
            String msisdn = request.getParameter("msisdn");
            if (msisdn != null && !msisdn.equals("")) {
                // if an msisdn is sent, then we retrieve the user with that phone number
                authenticateUserBasedOnMsisd(msisdn);
            }

            // Can be empty or null, in which case the default is used. This parameter is a key in the settings table indicating the class name of the serializer to use.
            String serializer = request.getParameter("serializer");

            // input stream
            // first byte contains number of forms (x)
            // followed by x number of UTF strings (use writeUTF method in DataOutput)
            formDownloadService.submitForms(request.getInputStream(), out, serializer);

        } else {
            response.setHeader("WWW-Authenticate", "BASIC realm=\"openxdata\"");
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
        }
    } catch (UserNotFoundException userNotFound) {
        out.println("Invalid msisdn");
        response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        return;
    } catch (Exception e) {
        log.error("Could not import data", e);
        out.println(e.getMessage());
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } finally {
        out.close();
    }
}
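
The comment in doGet above describes the expected request body: the first byte holds the number of forms, followed by that many UTF strings written with DataOutput.writeUTF. Below is a sketch of how a client could build such a payload under that assumption; FormPayloadSketch and buildPayload are illustrative names, not part of openxdata.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical helper illustrating the payload layout described in the servlet comment above.
public class FormPayloadSketch {

    public static byte[] buildPayload(String... forms) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream data = new DataOutputStream(buffer);
        data.writeByte(forms.length);  // first byte: number of forms (x)
        for (String form : forms) {
            data.writeUTF(form);       // followed by x UTF strings
        }
        data.flush();
        return buffer.toByteArray();
    }
}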

From source file:com.indeed.imhotep.web.QueryServlet.java

private void handleDescribeDataset(HttpServletRequest req, HttpServletResponse resp,
        DescribeStatement parsedQuery) throws IOException {
    final ServletOutputStream outputStream = resp.getOutputStream();
    final String dataset = parsedQuery.dataset;
    final DatasetMetadata datasetMetadata = metadata.getDataset(dataset);
    final boolean json = req.getParameter("json") != null;
    if (json) {
        resp.setContentType(MediaType.APPLICATION_JSON_VALUE);
        final ObjectMapper mapper = new ObjectMapper();
        mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
        final ObjectNode jsonRoot = mapper.createObjectNode();
        datasetMetadata.toJSON(jsonRoot, mapper, false);

        mapper.writeValue(outputStream, jsonRoot);
    } else {
        for (FieldMetadata field : datasetMetadata.getFields().values()) {
            final String description = Strings.nullToEmpty(field.getDescription());
            outputStream.println(field.getName() + "\t" + description);
        }
    }
    outputStream.close();
}

From source file:com.indeed.imhotep.web.QueryServlet.java

private void handleShowStatement(final HttpServletRequest req, final HttpServletResponse resp)
        throws IOException {
    final ServletOutputStream outputStream = resp.getOutputStream();
    final boolean json = req.getParameter("json") != null;

    if (json) {
        resp.setContentType(MediaType.APPLICATION_JSON_VALUE);
        final ObjectMapper mapper = new ObjectMapper();
        final ObjectNode jsonRoot = mapper.createObjectNode();
        final ArrayNode array = mapper.createArrayNode();
        jsonRoot.put("datasets", array);
        for (DatasetMetadata dataset : metadata.getDatasets().values()) {
            final ObjectNode datasetInfo = mapper.createObjectNode();
            dataset.toJSON(datasetInfo, mapper, true);
            array.add(datasetInfo);
        }
        mapper.writeValue(outputStream, jsonRoot);
    } else {
        for (DatasetMetadata dataset : metadata.getDatasets().values()) {
            outputStream.println(dataset.getName());
        }
    }
    outputStream.close();
}

From source file:org.LexGrid.LexBIG.caCore.web.util.LexEVSHTTPUtils.java

/**
 * Displays a record on screen
 * @param result
 * @param servletName
 * @param out
 * @param recordNum
 * @throws Exception
 */
private void printRecord(Object result, String servletName, ServletOutputStream out, int recordNum)
        throws Exception {

    Class resultClass = result.getClass();
    String className = resultClass.getName();
    Class superClass = resultClass.getSuperclass();

    Field[] fields = classCache.getAllFields(resultClass);
    Field[] superFields = classCache.getAllFields(superClass);

    if (recordNum == 1) {
        out.println("<TR BGCOLOR=\"#E3E4FA\">");
        for (int x = 0; x < fields.length; x++) {
            String fName = fields[x].getName();
            if (!fName.equalsIgnoreCase("serialVersionUID")) {
                out.println("<TD>" + fName + "</TD>");
            }

        }
        for (int x = 0; x < superFields.length; x++) {
            String fName = superFields[x].getName();
            if (!fName.equalsIgnoreCase("serialVersionUID")) {
                out.println("<TD>" + fName + "</TD>");
            }

        }
        out.println("</TR>");
    }
    out.println("<TR VALIGN=\"TOP\">");

    Field idField = null;
    String criteriaIdValue = null;

    for (int f = 0; f < fields.length; f++) {
        fields[f].setAccessible(true);
        String fieldName = fields[f].getName().substring(0, 1).toUpperCase() + fields[f].getName().substring(1);
        if (fields[f].getName().equalsIgnoreCase("id")) {
            idField = fields[f];
            try {
                Object idValue = idField.get(result);
                String id = null;
                if (!idField.getType().getName().endsWith("String")) {
                    id = String.valueOf(idValue);
                }
                criteriaIdValue = SystemConstant.AT + idField.getName() + SystemConstant.EQUAL + id;
            } catch (Exception ex) {
                throw new IOException(ex.getMessage());
            }
        } else if (fieldName.equalsIgnoreCase("serialVersionUID")) {
            continue;
        }
        boolean bean = false;
        String fieldType = fields[f].getType().getName();
        if (fieldType.indexOf("$") > 1)
            fieldType = fieldType.substring(0, fieldType.indexOf("$"));

        Object value = "-";
        if (fields[f].get(result) != null) {
            value = fields[f].get(result);
            fieldType = value.getClass().getName();
            if (fieldType.indexOf("$") > 1)
                fieldType = fieldType.substring(0, fieldType.indexOf("$"));

            if (!fieldType.startsWith("java.")) {
                bean = locateClass(fieldType);
            }

        }

        String methName = "get" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);

        String beanName = null;

        if (bean) {
            beanName = fieldType.substring(fieldType.lastIndexOf(SystemConstant.DOT) + 1);
        }

        String returnObjectName = fields[f].getName();
        boolean collectionType = false;
        if (returnObjectName.endsWith("Collection") || fieldType.endsWith("Vector")
                || fieldType.endsWith("HashSet")) {
            collectionType = true;
        }

        if ((fieldType.startsWith("java.") && !(collectionType)) || fields[f].getType().isPrimitive()) {
            String strValue = " ";
            if (value != null) {
                strValue = String.valueOf(value);
            }
            out.println("<TD>" + strValue + "</TD>");
        } else if (returnObjectName.endsWith("Collection")) {

            String returnClassName = returnObjectName.substring(0, returnObjectName.lastIndexOf("Collection"));
            returnClassName = returnClassName.substring(0, 1).toUpperCase() + returnClassName.substring(1);
            String disp = "<TD><a href=" + servletName + "?query=" + returnClassName + SystemConstant.AMPERSAND
                    + className.substring(className.lastIndexOf(SystemConstant.DOT) + 1) + "[@"
                    + idField.getName() + SystemConstant.EQUAL + idField.get(result) + "]>" + methName
                    + "</a></TD>";

            out.println(disp);
        } else if (bean) {
            String disp = "<TD><a href=" + servletName + "?query=" + beanName + SystemConstant.AMPERSAND
                    + className.substring(className.lastIndexOf(SystemConstant.DOT) + 1) + "[@"
                    + idField.getName() + SystemConstant.EQUAL + idField.get(result) + "]>" + methName
                    + "</a></TD>";

            out.println(disp);
        }

    }
    recordNum++;
    out.println("</TR>");
}

From source file:org.apache.cocoon.servlet.CocoonServlet.java

/**
 * Process the specified <code>HttpServletRequest</code> producing output
 * on the specified <code>HttpServletResponse</code>.
 */
public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {

    /* HACK for reducing class loader problems.                                     */
    /* example: xalan extensions fail if someone adds xalan jars in tomcat3.2.1/lib */
    if (this.initClassLoader) {
        try {
            Thread.currentThread().setContextClassLoader(this.classLoader);
        } catch (Exception e) {
        }
    }

    // used for timing the processing
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    // add the cocoon header timestamp
    if (this.showCocoonVersion) {
        res.addHeader("X-Cocoon-Version", Constants.VERSION);
    }

    // get the request (wrapped if contains multipart-form data)
    HttpServletRequest request;
    try {
        if (this.enableUploads) {
            request = requestFactory.getServletRequest(req);
        } else {
            request = req;
        }
    } catch (Exception e) {
        if (getLogger().isErrorEnabled()) {
            getLogger().error("Problem with Cocoon servlet", e);
        }

        manageException(req, res, null, null, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "Problem in creating the Request", null, null, e);
        return;
    }

    // Get the cocoon engine instance

    if (reloadCocoon(request.getPathInfo(), request.getParameter(Constants.RELOAD_PARAM))) {
        disposeCocoon();
        initLogger();
        createCocoon();
    }

    // Check if cocoon was initialized
    if (this.cocoon == null) {
        manageException(request, res, null, null, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "Initialization Problem", null /* "Cocoon was not initialized" */,
                null /* "Cocoon was not initialized, cannot process request" */, this.exception);
        return;
    }

    // We got it... Process the request
    String uri = request.getServletPath();
    if (uri == null) {
        uri = "";
    }
    String pathInfo = request.getPathInfo();
    if (pathInfo != null) {
        // VG: WebLogic fix: Both uri and pathInfo starts with '/'
        // This problem exists only in WL6.1sp2, not in WL6.0sp2 or WL7.0b.
        if (uri.length() > 0 && uri.charAt(0) == '/') {
            uri = uri.substring(1);
        }
        uri += pathInfo;
    }

    if (uri.length() == 0) {
        /* empty relative URI
         -> HTTP-redirect from /cocoon to /cocoon/ to avoid
            StringIndexOutOfBoundsException when calling
            "".charAt(0)
           else process URI normally
        */
        String prefix = request.getRequestURI();
        if (prefix == null) {
            prefix = "";
        }

        res.sendRedirect(res.encodeRedirectURL(prefix + "/"));
        return;
    }

    String contentType = null;
    ContextMap ctxMap = null;

    Environment env;
    try {
        if (uri.charAt(0) == '/') {
            uri = uri.substring(1);
        }
        // Pass uri into environment without URLDecoding, as it is already decoded.
        env = getEnvironment(uri, request, res);
    } catch (Exception e) {
        if (getLogger().isErrorEnabled()) {
            getLogger().error("Problem with Cocoon servlet", e);
        }

        manageException(request, res, null, uri, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "Problem in creating the Environment", null, null, e);
        return;
    }

    try {
        try {
            // Initialize a fresh log context containing the object model: it
            // will be used by the CocoonLogFormatter
            ctxMap = ContextMap.getCurrentContext();
            // Add thread name (default content for empty context)
            String threadName = Thread.currentThread().getName();
            ctxMap.set("threadName", threadName);
            // Add the object model
            ctxMap.set("objectModel", env.getObjectModel());
            // Add a unique request id (threadName + currentTime)
            ctxMap.set("request-id", threadName + System.currentTimeMillis());

            if (this.cocoon.process(env)) {
                contentType = env.getContentType();
            } else {
                // We reach this when there is nothing in the processing chain that matches
                // the request. For example, no matcher matches.
                getLogger().fatalError("The Cocoon engine failed to process the request.");
                manageException(request, res, env, uri, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        "Request Processing Failed", "Cocoon engine failed in process the request",
                        "The processing engine failed to process the request. This could be due to lack of matching or bugs in the pipeline engine.",
                        null);
                return;
            }
        } catch (ResourceNotFoundException e) {
            if (getLogger().isDebugEnabled()) {
                getLogger().warn(e.getMessage(), e);
            } else if (getLogger().isWarnEnabled()) {
                getLogger().warn(e.getMessage());
            }

            manageException(request, res, env, uri, HttpServletResponse.SC_NOT_FOUND, "Resource Not Found",
                    "Resource Not Found",
                    "The requested resource \"" + request.getRequestURI() + "\" could not be found", e);
            return;

        } catch (ConnectionResetException e) {
            if (getLogger().isDebugEnabled()) {
                getLogger().debug(e.toString(), e);
            } else if (getLogger().isWarnEnabled()) {
                getLogger().warn(e.toString());
            }

        } catch (IOException e) {
            // Tomcat5 wraps SocketException into ClientAbortException which extends IOException.
            if (getLogger().isDebugEnabled()) {
                getLogger().debug(e.toString(), e);
            } else if (getLogger().isWarnEnabled()) {
                getLogger().warn(e.toString());
            }

        } catch (Exception e) {
            if (getLogger().isErrorEnabled()) {
                getLogger().error("Internal Cocoon Problem", e);
            }

            manageException(request, res, env, uri, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "Internal Server Error", null, null, e);
            return;
        }

        stopWatch.stop();
        String timeString = null;
        if (getLogger().isInfoEnabled()) {
            timeString = processTime(stopWatch.getTime());
            getLogger().info("'" + uri + "' " + timeString);
        }

        if (contentType != null && contentType.equals("text/html")) {
            String showTime = request.getParameter(Constants.SHOWTIME_PARAM);
            boolean show = this.showTime;
            if (showTime != null) {
                show = !showTime.equalsIgnoreCase("no");
            }
            if (show) {
                if (timeString == null) {
                    timeString = processTime(stopWatch.getTime());
                }
                boolean hide = this.hiddenShowTime;
                if (showTime != null) {
                    hide = showTime.equalsIgnoreCase("hide");
                }
                ServletOutputStream out = res.getOutputStream();
                out.print((hide) ? "<!-- " : "<p>");
                out.print(timeString);
                out.println((hide) ? " -->" : "</p>");
            }
        }
    } finally {
        if (ctxMap != null) {
            ctxMap.clear();
        }

        try {
            if (request instanceof MultipartHttpServletRequest) {
                if (getLogger().isDebugEnabled()) {
                    getLogger().debug("Deleting uploaded file(s).");
                }
                ((MultipartHttpServletRequest) request).cleanup();
            }
        } catch (IOException e) {
            getLogger().error("Cocoon got an Exception while trying to cleanup the uploaded files.", e);
        }

        /*
         * Servlet Specification 2.2, 6.5 Closure of Response Object:
         *
         *   A number of events can indicate that the servlet has provided all of the
         *   content to satisfy the request and that the response object can be
         *   considered to be closed. The events are:
         *     o The termination of the service method of the servlet.
         *     o When the amount of content specified in the setContentLength method
         *       of the response has been written to the response.
         *     o The sendError method is called.
         *     o The sendRedirect method is called.
         *   When a response is closed, all content in the response buffer, if any remains,
         *   must be immediately flushed to the client.
         *
         * Due to the above, out.flush() and out.close() are not necessary, and sometimes
         * (if sendError or sendRedirect were used) request may be already closed.
         */
    }
}

From source file:com.indeed.imhotep.web.QueryServlet.java

private SelectExecutionStats handleSelectStatement(final SelectRequestArgs args, final HttpServletResponse resp,
        SelectStatement parsedQuery, final ExecutionManager.QueryTracker queryTracker) throws IOException {
    // hashing is done before calling translate so only original JParsec parsing is considered
    final String queryForHashing = parsedQuery.toHashKeyString();

    final IQLQuery iqlQuery = IQLTranslator.translate(parsedQuery,
            args.interactive ? imhotepInteractiveClient : imhotepClient, args.imhotepUserName, metadata,
            imhotepLocalTempFileSizeLimit, imhotepDaemonTempFileSizeLimit);

    // TODO: handle requested format mismatch: e.g. cached CSV but asked for TSV shouldn't have to rerun the query
    final String queryHash = getQueryHash(queryForHashing, iqlQuery.getShardVersionList(), args.csv);
    final String cacheFileName = queryHash + (args.csv ? ".csv" : ".tsv");
    final boolean isCached = queryCache.isFileCached(cacheFileName);

    final QueryMetadata queryMetadata = new QueryMetadata();

    queryMetadata.addItem("IQL-Cached", isCached, true);
    final DateTime newestShard = getLatestShardVersion(iqlQuery.getShardVersionList());
    queryMetadata.addItem("IQL-Newest-Shard", newestShard, args.returnNewestShardVersion);

    final String shardList = shardListToString(iqlQuery.getShardVersionList());
    queryMetadata.addItem("IQL-Shard-List", shardList, args.returnShardlist);

    final List<Interval> timeIntervalsMissingShards = iqlQuery.getTimeIntervalsMissingShards();
    if (timeIntervalsMissingShards.size() > 0) {
        final String missingIntervals = intervalListToString(timeIntervalsMissingShards);
        queryMetadata.addItem("IQL-Missing-Shards", missingIntervals);
    }

    queryMetadata.setPendingHeaders(resp);

    if (args.headOnly) {
        return new SelectExecutionStats(true);
    }
    final ServletOutputStream outputStream = resp.getOutputStream();
    if (args.progress) {
        outputStream.print(": This is the start of the IQL Query Stream\n\n");
    }
    if (!args.asynchronous) {
        ResultServlet.setContentType(resp, args.avoidFileSave, args.csv, args.progress);
        if (!args.cacheReadDisabled && isCached) {
            log.trace("Returning cached data in " + cacheFileName);

            // read metadata from cache
            try {
                final InputStream metadataCacheStream = queryCache
                        .getInputStream(cacheFileName + METADATA_FILE_SUFFIX);
                final QueryMetadata cachedMetadata = QueryMetadata.fromStream(metadataCacheStream);
                queryMetadata.mergeIn(cachedMetadata);

                queryMetadata.setPendingHeaders(resp);
                resp.setHeader("Access-Control-Expose-Headers", StringUtils.join(resp.getHeaderNames(), ", "));
                if (args.progress) {
                    outputStream.println("event: header");
                    outputStream.print("data: ");
                    outputStream.print(queryMetadata.toJSON() + "\n\n");
                }
            } catch (Exception e) {
                log.info("Failed to load metadata cache from " + cacheFileName + METADATA_FILE_SUFFIX, e);
            }

            final InputStream cacheInputStream = queryCache.getInputStream(cacheFileName);
            final int rowsWritten = IQLQuery.copyStream(cacheInputStream, outputStream, iqlQuery.getRowLimit(),
                    args.progress);
            outputStream.close();
            return new SelectExecutionStats(isCached, rowsWritten, false, queryHash, 0);
        }
        final IQLQuery.WriteResults writeResults;
        final IQLQuery.ExecutionResult executionResult;
        try {
            // TODO: should we always get totals? opt out http param?
            executionResult = iqlQuery.execute(args.progress, outputStream, true);
            queryMetadata.addItem("IQL-Timings", executionResult.getTimings().replace('\n', '\t'),
                    args.progress);
            queryMetadata.addItem("IQL-Imhotep-Temp-Bytes-Written",
                    executionResult.getImhotepTempFilesBytesWritten(), args.progress);
            queryMetadata.addItem("IQL-Totals", Arrays.toString(executionResult.getTotals()), args.getTotals);

            queryMetadata.setPendingHeaders(resp);
            resp.setHeader("Access-Control-Expose-Headers", StringUtils.join(resp.getHeaderNames(), ", "));

            if (args.progress) {
                outputStream.println("event: header");
                outputStream.print("data: ");
                outputStream.print(queryMetadata.toJSON() + "\n\n");
            }
            final Iterator<GroupStats> groupStats = executionResult.getRows();
            final int groupingColumns = Math.max(1,
                    (parsedQuery.groupBy == null || parsedQuery.groupBy.groupings == null) ? 1
                            : parsedQuery.groupBy.groupings.size());
            final int selectColumns = Math.max(1,
                    (parsedQuery.select == null || parsedQuery.select.getProjections() == null) ? 1
                            : parsedQuery.select.getProjections().size());
            if (!args.asynchronous) {
                writeResults = iqlQuery.outputResults(groupStats, outputStream, args.csv, args.progress,
                        iqlQuery.getRowLimit(), groupingColumns, selectColumns, args.cacheWriteDisabled);
            } else {
                writeResults = new IQLQuery.WriteResults(0, null, groupStats, 0);
            }
            if (!args.cacheWriteDisabled && !isCached) {
                executorService.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        try {
                            try {
                                final OutputStream metadataCacheStream = queryCache
                                        .getOutputStream(cacheFileName + METADATA_FILE_SUFFIX);
                                queryMetadata.toStream(metadataCacheStream);
                                metadataCacheStream.close();
                            } catch (Exception e) {
                                log.warn("Failed to upload metadata cache: " + cacheFileName, e);
                            }
                            try {
                                uploadResultsToCache(writeResults, cacheFileName, args.csv);
                            } catch (Exception e) {
                                log.warn("Failed to upload cache: " + cacheFileName, e);
                            }
                        } finally {
                            Closeables2.closeQuietly(queryTracker, log);
                        }
                        return null;
                    }
                });
                queryTracker.markAsynchronousRelease(); // going to be closed asynchronously after cache is uploaded
            }
        } catch (ImhotepOutOfMemoryException e) {
            throw Throwables.propagate(e);
        } finally {
            Closeables2.closeQuietly(iqlQuery, log);
        }
        outputStream.close();
        return new SelectExecutionStats(isCached, writeResults, queryHash,
                executionResult.getImhotepTempFilesBytesWritten());
    } else {
        // TODO: rework the async case to use the same code path as the sync case above except running under an executor
        if (!isCached && args.cacheWriteDisabled) {
            throw new IllegalStateException("Query cache is disabled so only synchronous calls can be served");
        }

        resp.setContentType("application/json");

        if (!isCached) {
            executorService.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    try {
                        // TODO: get totals working with the cache
                        final IQLQuery.ExecutionResult executionResult = iqlQuery.execute(false, null, false);
                        final Iterator<GroupStats> groupStats = executionResult.getRows();

                        final OutputStream cacheStream = queryCache.getOutputStream(cacheFileName);
                        IQLQuery.writeRowsToStream(groupStats, cacheStream, args.csv, Integer.MAX_VALUE, false);
                        cacheStream.close(); // has to be closed
                        return null;
                    } finally {
                        Closeables2.closeQuietly(iqlQuery, log);
                        Closeables2.closeQuietly(queryTracker, log);
                    }
                }
            });
            queryTracker.markAsynchronousRelease(); // going to be closed asynchronously after cache is uploaded
        }

        final URL baseURL = new URL(args.requestURL);
        final URL resultsURL = new URL(baseURL, "results/" + cacheFileName);

        final ObjectMapper mapper = new ObjectMapper();
        final ObjectNode ret = mapper.createObjectNode();
        ret.put("filename", resultsURL.toString());
        mapper.writeValue(outputStream, ret);
        outputStream.close();
        // we don't know number of rows as it's handled asynchronously
        return new SelectExecutionStats(isCached, new IQLQuery.WriteResults(0, null, null, 0), queryHash, 0);
    }
}