Example usage for java.util.Arrays.deepToString

Introduction

This page lists example usages of java.util.Arrays.deepToString, collected from open-source projects.

Prototype

public static String deepToString(Object[] a) 

Document

Returns a string representation of the "deep contents" of the specified array.
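
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below) of what deepToString adds over Arrays.toString:

import java.util.Arrays;

public class DeepToStringDemo {
    public static void main(String[] args) {
        Object[] nested = { "a", new int[] { 1, 2 }, new String[] { "x", "y" } };

        // Arrays.toString renders nested arrays as opaque type/hash references,
        // e.g. [a, [I@1b6d3586, [Ljava.lang.String;@4554617c]
        System.out.println(Arrays.toString(nested));

        // Arrays.deepToString recurses into them: [a, [1, 2], [x, y]]
        System.out.println(Arrays.deepToString(nested));

        // A self-referencing array is rendered as "[...]" rather than recursing forever
        Object[] self = new Object[1];
        self[0] = self;
        System.out.println(Arrays.deepToString(self)); // [[...]]
    }
}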

Usage

From source file:org.batoo.jpa.core.impl.jdbc.dbutils.AbstractQueryRunner.java

/**
 * Throws a new exception with a more informative error message.
 *
 * @param cause
 *            The original exception that will be chained to the new exception when it's rethrown.
 * 
 * @param sql
 *            The query that was executing when the exception happened.
 * 
 * @param params
 *            The query replacement parameters; <code>null</code> is a valid value to pass in.
 * 
 * @throws SQLException
 *             if a database access error occurs
 */
protected void rethrow(SQLException cause, String sql, Object... params) throws SQLException {

    String causeMessage = cause.getMessage();
    if (causeMessage == null) {
        causeMessage = "";
    }
    final StringBuffer msg = new StringBuffer(causeMessage);

    msg.append(" Query: ");
    msg.append(sql);
    msg.append(" Parameters: ");

    if (params == null) {
        msg.append("[]");
    } else {
        msg.append(Arrays.deepToString(params));
    }

    final SQLException e = new SQLException(msg.toString(), cause.getSQLState(), cause.getErrorCode());
    e.setNextException(cause);

    throw e;
}
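
For illustration, with a hypothetical cause message of "Syntax error", sql of "SELECT * FROM t WHERE id=?" and params of {42}, the rethrown exception's message would read: Syntax error Query: SELECT * FROM t WHERE id=? Parameters: [42]. Passing null for params yields "Parameters: []" rather than a NullPointerException, which is why the null check precedes the Arrays.deepToString call.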

From source file:org.graphwalker.machines.ExtendedFiniteStateMachine.java

@Override
public String getCurrentDataString() {
    String retur = "";

    if (jsEngine != null) {
        Set<Entry<String, Object>> dataTable = getCurrentJsEngineData();
        for (Entry<String, Object> entry : dataTable) {
            if (!entry.getKey().equals("println") && !entry.getKey().equals("print")
                    && !entry.getKey().equals("context"))
                retur += entry.getKey() + "=" + entry.getValue() + ";";
        }
    } else if (beanShellEngine != null) {
        Hashtable<String, Object> dataTable = getCurrentBeanShellData();
        Enumeration<String> e = dataTable.keys();
        while (e.hasMoreElements()) {
            String key = e.nextElement();
            String data = "";
            if (dataTable.get(key) instanceof Object[]) {
                data = Arrays.deepToString((Object[]) dataTable.get(key));
            } else {
                data = dataTable.get(key).toString();
            }
            retur += key + "=" + data + ";";
        }
    }
    return retur;
}
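
Note that the instanceof Object[] check only matches arrays of reference types; a primitive array such as int[] is not an Object[], so such a value falls through to the else branch and prints as a type/hash reference via toString().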

From source file:ca.on.oicr.pde.deciders.GenomicAlignmentNovoalignDecider.java

public static void main(String args[]) {

    List<String> params = new ArrayList<String>();
    params.add("--plugin");
    params.add(GenomicAlignmentNovoalignDecider.class.getCanonicalName());
    params.add("--");
    params.addAll(Arrays.asList(args));
    System.out.println("Parameters: " + Arrays.deepToString(params.toArray()));
    net.sourceforge.seqware.pipeline.runner.PluginRunner.main(params.toArray(new String[params.size()]));

}

From source file:edu.harvard.iq.dataverse.rserve.RemoteDataFrameService.java

public Map<String, String> execute(RJobRequest sro) {
    dbgLog.fine("RemoteDataFrameService: execute() starts here.");

    // set the return object
    Map<String, String> result = new HashMap<String, String>();

    try {
        // TODO: 
        // Split the code below into neat individual methods - for 
        // initializing the connection, loading the remote libraries, 
        // creating remote R vectors for the parameters that will be used 
        // to create the data frame - variable names, labels, etc., and 
        // executing the main request and any necessary post-processing
        // -- L.A. 4.0 alpha 1

        // Set up an Rserve connection
        dbgLog.fine("sro dump:\n" + ToStringBuilder.reflectionToString(sro, ToStringStyle.MULTI_LINE_STYLE));

        dbgLog.fine("RSERVE_USER=" + RSERVE_USER + "[default=rserve]");
        dbgLog.fine("RSERVE_PASSWORD=" + RSERVE_PWD + "[default=rserve]");
        dbgLog.fine("RSERVE_PORT=" + RSERVE_PORT + "[default=6311]");
        dbgLog.fine("RSERVE_HOST=" + RSERVE_HOST);

        RConnection c = new RConnection(RSERVE_HOST, RSERVE_PORT);

        c.login(RSERVE_USER, RSERVE_PWD);
        dbgLog.info(">" + c.eval("R.version$version.string").asString() + "<");

        // check working directories
        // This needs to be done *before* we try to create any files 
        // there!
        setupWorkingDirectory(c);

        // send the data file to the Rserve side:

        String infile = sro.getTabularDataFileName();
        InputStream inb = new BufferedInputStream(new FileInputStream(infile));

        int bufsize;
        byte[] bffr = new byte[1024];

        RFileOutputStream os = c.createFile(tempFileNameIn);
        while ((bufsize = inb.read(bffr)) != -1) {
            os.write(bffr, 0, bufsize);
        }
        os.close();
        inb.close();

        // Rserve code starts here
        dbgLog.fine("wrkdir=" + RSERVE_TMP_DIR);

        String RversionLine = "R.Version()$version.string";
        String Rversion = c.eval(RversionLine).asString();

        // We need to initialize our R session:
        // send custom R code library over to the Rserve and load the code:

        String rscript = readLocalResource(DATAVERSE_R_FUNCTIONS);
        c.voidEval(rscript);

        dbgLog.fine("raw variable type=" + sro.getVariableTypes());
        c.assign("vartyp", new REXPInteger(sro.getVariableTypes()));
        String[] tmpt = c.eval("vartyp").asStrings();
        dbgLog.fine("vartyp length=" + tmpt.length + "\t " + StringUtils.join(tmpt, ","));

        // variable *formats* - not to be confused with variable *types*!
        // these specify extra, optional format specifications - for example, 
        // String variables may represent date and time values. 

        Map<String, String> tmpFmt = sro.getVariableFormats();

        dbgLog.fine("tmpFmt=" + tmpFmt);

        // In the fragment below we create an R list varFrmt storing 
        // these format specifications: 

        if (tmpFmt != null) {
            Set<String> vfkeys = tmpFmt.keySet();
            String[] tmpfk = (String[]) vfkeys.toArray(new String[vfkeys.size()]);
            String[] tmpfv = getValueSet(tmpFmt, tmpfk);
            c.assign("tmpfk", new REXPString(tmpfk));
            c.assign("tmpfv", new REXPString(tmpfv));
            String fmtNamesLine = "names(tmpfv)<- tmpfk";
            c.voidEval(fmtNamesLine);
            String fmtValuesLine = "varFmt<- as.list(tmpfv)";
            c.voidEval(fmtValuesLine);
        } else {
            String[] varFmtN = {};
            List<String> varFmtV = new ArrayList<String>();
            c.assign("varFmt", new REXPList(new RList(varFmtV, varFmtN)));
        }

        // Variable names:
        String[] jvnamesRaw = sro.getVariableNames();
        String[] jvnames = null;

        if (sro.hasUnsafeVariableNames) {
            // create  list
            jvnames = sro.safeVarNames;
            dbgLog.fine("renamed=" + StringUtils.join(jvnames, ","));
        } else {
            jvnames = jvnamesRaw;
        }

        c.assign("vnames", new REXPString(jvnames));

        // confirm:

        String[] tmpjvnames = c.eval("vnames").asStrings();
        dbgLog.fine("vnames:" + StringUtils.join(tmpjvnames, ","));

        // read.dataverseTabData method, from dataverse_r_functions.R, 
        // uses R's standard scan() function to read the tabular data we've 
        // just transferred over and turn it into a dataframe. It adds some 
        // custom post-processing too - restores missing values, converts 
        // strings representing dates and times into R date and time objects, 
        // and more. 

        // Parameters for the read.dataverseTabData method executed on the R side:

        // file -> tempFileName
        // col.names -> Arrays.deepToString((new REXPString(jvnames)).asStrings())
        // colClassesx -> Arrays.deepToString((new REXPInteger(sro.getVariableTypes())).asStrings())
        // varFormat -> Arrays.deepToString((new REXPString(getValueSet(tmpFmt, tmpFmt.keySet().toArray(new String[tmpFmt.keySet().size()])))).asStrings())

        dbgLog.fine("read.dataverseTabData parameters:");
        dbgLog.fine("col.names = " + Arrays.deepToString((new REXPString(jvnames)).asStrings()));
        dbgLog.fine(
                "colClassesx = " + Arrays.deepToString((new REXPInteger(sro.getVariableTypes())).asStrings()));
        dbgLog.fine("varFormat = " + Arrays.deepToString((new REXPString(
                getValueSet(tmpFmt, tmpFmt.keySet().toArray(new String[tmpFmt.keySet().size()]))))
                        .asStrings()));

        String readtableline = "x<-read.dataverseTabData(file='" + tempFileNameIn
                + "', col.names=vnames, colClassesx=vartyp, varFormat=varFmt )";
        dbgLog.fine("readtable=" + readtableline);

        c.voidEval(readtableline);

        if (sro.hasUnsafeVariableNames) {
            dbgLog.fine("unsafeVariableNames exist");
            jvnames = sro.safeVarNames;
            String[] rawNameSet = sro.renamedVariableArray;
            String[] safeNameSet = sro.renamedResultArray;

            c.assign("tmpRN", new REXPString(rawNameSet));
            c.assign("tmpSN", new REXPString(safeNameSet));

            String raw2safevarNameTableLine = "names(tmpRN)<- tmpSN";
            c.voidEval(raw2safevarNameTableLine);
            String attrRsafe2rawLine = "attr(x, 'Rsafe2raw')<- as.list(tmpRN)";
            c.voidEval(attrRsafe2rawLine);
        } else {
            String attrRsafe2rawLine = "attr(x, 'Rsafe2raw')<-list();";
            c.voidEval(attrRsafe2rawLine);
        }

        // Restore NAs (missing values) in the data frame:
        // (these are encoded as empty strings in dataverse tab files)
        // Why are we doing it here? And not in the dataverse_r_functions.R 
        // fragment? 

        String asIsline = "for (i in 1:dim(x)[2]){ " + "if (attr(x,'var.type')[i] == 0) {"
                + "x[[i]]<-I(x[[i]]);  x[[i]][ x[[i]] == '' ]<-NA  }}";
        c.voidEval(asIsline);

        String[] varLabels = sro.getVariableLabels();

        c.assign("varlabels", new REXPString(varLabels));

        String attrVarLabelsLine = "attr(x, 'var.labels')<-varlabels";
        c.voidEval(attrVarLabelsLine);

        // Confirm:
        String[] vlbl = c.eval("attr(x, 'var.labels')").asStrings();
        dbgLog.fine("varlabels=" + StringUtils.join(vlbl, ","));

        // create the VALTABLE and VALORDER lists:
        c.voidEval("VALTABLE<-list()");
        c.voidEval("VALORDER<-list()");

        // In the fragment below, we'll populate the VALTABLE list that we've
        // just created with the actual values and labels of our categorical variables.
        // TODO: 
        // This code has been imported from the DVN v2-3
        // implementation. I keep wondering if there is a simpler way to
        // achieve this - to pass these maps of values and labels to R 
        // in fewer steps/with less code - ?
        // -- L.A. 4.3

        Map<String, Map<String, String>> vltbl = sro.getValueTable();
        Map<String, List<String>> orderedCategoryValues = sro.getCategoryValueOrders();
        String[] variableIds = sro.getVariableIds();

        for (int j = 0; j < variableIds.length; j++) {
            // if this variable has a value-label table,
            // pass its key and value arrays to Rserve;
            // finalize a value-table on the Rserve side:

            String varId = variableIds[j];

            if (vltbl.containsKey(varId)) {

                Map<String, String> tmp = (HashMap<String, String>) vltbl.get(varId);
                Set<String> vlkeys = tmp.keySet();
                String[] tmpk = (String[]) vlkeys.toArray(new String[vlkeys.size()]);
                String[] tmpv = getValueSet(tmp, tmpk);

                dbgLog.fine("tmp:k=" + StringUtils.join(tmpk, ","));
                dbgLog.fine("tmp:v=" + StringUtils.join(tmpv, ","));

                // index number starts from 1(not 0):
                int indx = j + 1;
                dbgLog.fine("index=" + indx);

                if (tmpv.length > 0) {

                    c.assign("tmpk", new REXPString(tmpk));

                    c.assign("tmpv", new REXPString(tmpv));

                    String namesValueLine = "names(tmpv)<- tmpk";
                    c.voidEval(namesValueLine);

                    String sbvl = "VALTABLE[['" + Integer.toString(indx) + "']]" + "<- as.list(tmpv)";
                    dbgLog.fine("frag=" + sbvl);
                    c.voidEval(sbvl);

                    // confirmation test for j-th variable name
                    REXP jl = c.parseAndEval(sbvl);
                    dbgLog.fine("jl(" + j + ") = " + jl);
                }
            }

            // If this is an ordered categorical value (and that means,
            // it was produced from an ordered factor, from an ingested 
            // R data frame, since no other formats we support have 
            // ordered categoricals), we'll also supply a list of these
            // ordered values:

            if (orderedCategoryValues != null && orderedCategoryValues.containsKey(varId)) {
                int indx = j + 1;
                List<String> orderList = orderedCategoryValues.get(varId);
                if (orderList != null) {
                    String[] ordv = (String[]) orderList.toArray(new String[orderList.size()]);
                    dbgLog.fine("ordv=" + StringUtils.join(ordv, ","));
                    c.assign("ordv", new REXPString(ordv));
                    String sbvl = "VALORDER[['" + Integer.toString(indx) + "']]" + "<- as.list(ordv)";
                    dbgLog.fine("VALORDER[...]=" + sbvl);
                    c.voidEval(sbvl);
                } else {
                    dbgLog.fine("NULL orderedCategoryValues list.");
                }
            }
        }

        // And now we store the VALTABLE and MSVLTBL as attributes of the 
        // dataframe we are cooking:
        dbgLog.fine("length of vl=" + c.eval("length(VALTABLE)").asInteger());
        String attrValTableLine = "attr(x, 'val.table')<-VALTABLE";
        c.voidEval(attrValTableLine);

        String msvStartLine = "MSVLTBL<-list();";
        c.voidEval(msvStartLine);
        String attrMissvalLine = "attr(x, 'missval.table')<-MSVLTBL";
        c.voidEval(attrMissvalLine);

        // But we are not done, with these value label maps... We now need
        // to call these methods from the dataverse_r_functions.R script
        // to further process the lists. Among other things, they will 
        // create these new lists - value index and missing value index, that 
        // simply indicate which variables have any of the above; these will 
        // also be saved as attributes of the data frame, val.index and 
        // missval.index respectively. But, also, the methods will reprocess
        // and overwrite the val.table and missval.table attributes already stored in 
        // the dataframe. I don't fully understand why that is necessary, or what it is
        // that we are actually adding to the lists there... Another TODO: ? 

        String createVIndexLine = "x<-createvalindex(dtfrm=x, attrname='val.index');";
        c.voidEval(createVIndexLine);
        String createMVIndexLine = "x<-createvalindex(dtfrm=x, attrname='missval.index');";
        c.voidEval(createMVIndexLine);

        // And now we'll call the last method from the R script - createDataverseDataFrame();
        // It should probably be renamed. The dataframe has already been created. 
        // what this method does, it goes through the frame, and changes the 
        // vectors representing categorical variables to R factors. 
        // For example, suppose this tabular file was produced from a Stata file 
        // that had a categorical variable in which "Male" and "Female" were 
        // represented by 0 and 1. In the Dataverse database, the string values 
        // "Male" and "Female" are now stored as "categorical value labels", and 
        // the column in the tab file holds numeric 1s and 0s. That's what the R
        // dataframe was created from, so it now has a numeric vector of 1s and 0s
        // representing this variable. So in this step we are going 
        // to change this vector into a factor, using the labels and values 
        // that we already passed over via Rserve and stored in the val.table, above. 

        // TODO: 
        // I'm going to propose that we go back to what we used to do back in 
        // DVN 2-3.* - instead of giving the user a single dataframe (.RData) 
        // file, provide a zip file, with the data frame, and also a README 
        // file with some documentation explaining how the data frame was 
        // created, and pointing out some potential issues stemming from the 
        // conversion between formats. Converting Stata categoricals into 
        // R factors is one of such issues (if nothing else, do note that 
        // the UNF of the datafile with the column described in the example 
        // above will change, if the resulting R dataframe is reingested! See 
        // the UNF documentation for more info...). We may also make this 
        // download interactive - giving the user some options for how 
        // to handle the conversion (so, another choice would be to convert 
        // the above to a factor of "0" and "1"s), etc. 
        // -- L.A. 4.3

        String dataFileName = "Data." + PID + "." + sro.getFormatRequested();

        // data file to be copied back to the dvn
        String dsnprfx = RSERVE_TMP_DIR + "/" + dataFileName;

        String dataverseDataFrameCommand = "createDataverseDataFrame(dtfrm=x," + "dwnldoptn='"
                + sro.getFormatRequested() + "'" + ", dsnprfx='" + dsnprfx + "')";

        c.voidEval(dataverseDataFrameCommand);

        int wbFileSize = getFileSize(c, dsnprfx);

        dbgLog.fine("wbFileSize=" + wbFileSize);

        // If the above succeeded, the dataframe has been saved on the 
        // Rserve side as an .Rdata file. Now we can transfer it back to the
        // dataverse side:

        File localDataFrameFile = transferRemoteFile(c, dsnprfx, RWRKSP_FILE_PREFIX, "RData", wbFileSize);

        result.put("dataFrameFileName", localDataFrameFile.getAbsolutePath());

        if (localDataFrameFile != null) {
            dbgLog.fine("data frame file name: " + localDataFrameFile.getAbsolutePath());
        } else {
            dbgLog.fine("data frame file is null!");
            // throw an exception??
        }

        result.put("Rversion", Rversion);

        dbgLog.fine("result object (before closing the Rserve):\n" + result);

        String deleteLine = "file.remove('" + tempFileNameIn + "')";
        c.eval(deleteLine);

        c.close();

    } catch (RserveException rse) {
        // RserveException (Rserve is not running maybe?)
        // TODO: *ABSOLUTELY* need more diagnostics here!
        rse.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (REXPMismatchException mme) {
        mme.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (IOException ie) {
        ie.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (Exception ex) {
        ex.printStackTrace();
        result.put("RexecError", "true");
        return result;
    }

    return result;

}

From source file:org.wso2.carbon.databridge.core.internal.EventDispatcher.java

private StreamTypeHolder getStreamDefinitionHolder(int tenantId) {
    // this will occur only outside of carbon (ex: Siddhi)

    StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);

    if (streamTypeHolder != null) {
        if (log.isDebugEnabled()) {
            String logMsg = "Event stream holder for tenant : " + tenantId + " : \n ";
            logMsg += "Meta, Correlation & Payload Data Type Map : ";
            for (Map.Entry entry : streamTypeHolder.getAttributeCompositeMap().entrySet()) {
                logMsg += "StreamID=" + entry.getKey() + " :  ";
                logMsg += "Meta= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[0]) + " :  ";
                logMsg += "Correlation= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[1]) + " :  ";
                logMsg += "Payload= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[2]) + "\n";
            }
            log.debug(logMsg);
        }
        return streamTypeHolder;
    } else {
        return initDomainNameStreamTypeHolderCache(tenantId);
    }
}

From source file:com.laudandjolynn.mytv.Main.java

/**
 * Creates the recurring scheduled tasks: a weekly crawl of the TV program
 * tables, plus daily refreshes of the proxy list and the caches.
 * 
 * @param data
 * @param tvService
 */
private static void createEverydayCron(final MyTvData data, final TvService tvService) {
    ScheduledExecutorService scheduled = new ScheduledThreadPoolExecutor(3,
            new BasicThreadFactory.Builder().namingPattern("Mytv_Scheduled_Task").build());
    Date today = new Date();
    String nextWeek = DateUtils.date2String(DateUtils.nextWeek(today), "yyyy-MM-dd 00:01:00");
    long crawlTaskInitDelay = (DateUtils.string2Date(nextWeek).getTime() - today.getTime()) / 1000;
    logger.info("cron crawler task will be automatic start after " + crawlTaskInitDelay + " seconds at "
            + nextWeek);
    scheduled.scheduleWithFixedDelay(new Runnable() {

        @Override
        public void run() {
            Date[] weeks = DateUtils.getWeek(new Date());
            logger.info("begin to crawl program table of " + Arrays.deepToString(weeks));
            ExecutorService executorService = Executors.newFixedThreadPool(Constant.CPU_PROCESSOR_NUM,
                    new BasicThreadFactory.Builder().namingPattern("Mytv_Schedule_Crawl_Program_Table_%d")
                            .build());
            List<TvStation> stationList = tvService.getDisplayedTvStation();
            for (Date date : weeks) {
                crawlAllProgramTable(stationList, executorService, DateUtils.date2String(date, "yyyy-MM-dd"),
                        tvService);
            }
            executorService.shutdown();
        }
    }, crawlTaskInitDelay, 604860, TimeUnit.SECONDS);

    // schedule the daily proxy-refresh task
    String nextDate = DateUtils.tommorow() + " 23:00:00";
    long commonInitDelay = (DateUtils.string2Date(nextDate).getTime() - today.getTime()) / 1000;
    logger.info("cron refresh proxy task will be automatic start after " + commonInitDelay + " seconds at "
            + nextDate);
    scheduled.scheduleWithFixedDelay(new Runnable() {

        @Override
        public void run() {
            logger.info("begin to refresh proxies.");
            MyTvProxyManager.getInstance().refresh();
        }
    }, commonInitDelay, 86400, TimeUnit.SECONDS);

    // schedule the daily cache-refresh task
    logger.info("cron refresh cache task will be automatic start after " + commonInitDelay + " seconds at "
            + nextDate);
    scheduled.scheduleWithFixedDelay(new Runnable() {

        @Override
        public void run() {
            logger.info("begin to refresh caches.");
            makeCache(tvService);
        }
    }, commonInitDelay, 86400, TimeUnit.SECONDS);

    // leave the scheduler running so the recurring tasks continue to fire
    // scheduled.shutdown();
}

From source file:io.druid.segment.IndexIO.java

public static void validateRowValues(Map<String, DimensionHandler> dimHandlers, Rowboat rb1,
        IndexableAdapter adapter1, Rowboat rb2, IndexableAdapter adapter2) {
    if (rb1.getTimestamp() != rb2.getTimestamp()) {
        throw new SegmentValidationException("Timestamp mismatch. Expected %d found %d", rb1.getTimestamp(),
                rb2.getTimestamp());
    }
    final Object[] dims1 = rb1.getDims();
    final Object[] dims2 = rb2.getDims();
    if (dims1.length != dims2.length) {
        throw new SegmentValidationException("Dim lengths not equal %s vs %s", Arrays.deepToString(dims1),
                Arrays.deepToString(dims2));
    }
    final Indexed<String> dim1Names = adapter1.getDimensionNames();
    final Indexed<String> dim2Names = adapter2.getDimensionNames();
    for (int i = 0; i < dims1.length; ++i) {
        final Object dim1Vals = dims1[i];
        final Object dim2Vals = dims2[i];
        final String dim1Name = dim1Names.get(i);
        final String dim2Name = dim2Names.get(i);

        ColumnCapabilities capabilities1 = adapter1.getCapabilities(dim1Name);
        ColumnCapabilities capabilities2 = adapter2.getCapabilities(dim2Name);
        ValueType dim1Type = capabilities1.getType();
        ValueType dim2Type = capabilities2.getType();
        if (dim1Type != dim2Type) {
            throw new SegmentValidationException("Dim [%s] types not equal. Expected %d found %d", dim1Name,
                    dim1Type, dim2Type);
        }

        DimensionHandler dimHandler = dimHandlers.get(dim1Name);
        dimHandler.validateSortedEncodedKeyComponents(dim1Vals, dim2Vals, adapter1.getDimValueLookup(dim1Name),
                adapter2.getDimValueLookup(dim2Name));
    }
}

From source file:org.apache.zeppelin.submarine.hadoop.YarnClient.java

public List<Map<String, Object>> getAppAttempts(String appId) {
    List<Map<String, Object>> appAttempts = new ArrayList<>();
    String appUrl = this.yarnWebHttpAddr + "/ws/v1/cluster/apps/" + appId + "/appattempts?_="
            + System.currentTimeMillis();

    InputStream inputStream = null;
    try {
        HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET);
        inputStream = response.getEntity().getContent();
        String result = new BufferedReader(new InputStreamReader(inputStream)).lines()
                .collect(Collectors.joining(System.lineSeparator()));
        if (response.getStatusLine().getStatusCode() != 200 /*success*/) {
            LOGGER.warn("Status code " + response.getStatusLine().getStatusCode());
            LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders()));
            LOGGER.warn("result\n" + result);
        }

        // parse app status json
        appAttempts = parseAppAttempts(result);
    } catch (Exception exp) {
        exp.printStackTrace();
    } finally {
        try {
            if (null != inputStream) {
                inputStream.close();
            }
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    }

    return appAttempts;
}

From source file:org.wso2.carbon.databridge.core.internal.EventDispatcher.java

public synchronized void updateStreamDefinitionHolder(AgentSession agentSession) {
    int tenantId = agentSession.getCredentials().getTenantId();
    StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);

    if (streamTypeHolder != null) {
        if (log.isDebugEnabled()) {
            String logMsg = "Event stream holder for tenant : " + tenantId + " : \n ";
            logMsg += "Meta, Correlation & Payload Data Type Map : ";
            for (Map.Entry entry : streamTypeHolder.getAttributeCompositeMap().entrySet()) {
                logMsg += "StreamID=" + entry.getKey() + " :  ";
                logMsg += "Meta= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[0]) + " :  ";
                logMsg += "Correlation= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[1]) + " :  ";
                logMsg += "Payload= " + Arrays.deepToString(
                        ((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[2]) + "\n";
            }
            log.debug(logMsg);
        }
        updateDomainNameStreamTypeHolderCache(tenantId);
    }
}

From source file:org.richfaces.tests.metamer.ftest.AbstractGrapheneTest.java

public void testRequestEventsAfter(String... events) {
    String[] actualEvents = selenium.getEval(new JavaScript("window.metamerEvents")).split(" ");
    assertEquals(actualEvents, events, format("The events ({0}) didn't come in the expected order ({1})",
            Arrays.deepToString(actualEvents), Arrays.deepToString(events)));
}