Example usage for java.util Arrays deepToString

Introduction

On this page you can find example usages of java.util.Arrays.deepToString.

Prototype

public static String deepToString(Object[] a) 

Document

Returns a string representation of the "deep contents" of the specified array.
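
A minimal, self-contained sketch of this behaviour (the class and variable names below are illustrative, not taken from the usage examples that follow):

import java.util.Arrays;

public class DeepToStringDemo {
    public static void main(String[] args) {
        int[][] grid = { { 1, 2 }, { 3, 4 } };
        // Arrays.toString only renders the outer array, so nested arrays
        // appear as object references such as "[I@1b6d3586".
        System.out.println(Arrays.toString(grid));
        // Arrays.deepToString recurses into nested arrays and prints
        // "[[1, 2], [3, 4]]".
        System.out.println(Arrays.deepToString(grid));
    }
}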

Usage

From source file:Methods.CalculusNewtonRaphson.java

// Retrieves the stored values, copies them into arrays, and adds them to a global dataset.
private static void datasetPointsNewtonRapson() {

    double[] data1 = new double[xNewLinkedList.size()];
    double[] data2 = new double[xNewLinkedList.size()];
    int i = 0;

    xNewHead = xNewLinkedList.head;
    xNewHead = xNewHead.getNext();
    while (xNewHead != null) { // copy the linked list contents into the arrays
        data1[i] = xNewHead.getElement1();
        data2[i] = xNewHead.getElement2();
        i++;
        xNewHead = xNewHead.getNext();
    }

    double[][] data = { data1, data2 };
    System.out.println("TEST DEEP ARRAY NR: " + Arrays.deepToString(data));
    datasetPoints.addSeries("Iteration points", data);

}

From source file:com.celements.pagetype.service.PageTypeService.java

Set<PageTypeReference> getPageTypeRefsForCategories(Set<String> catList) {
    catList = new HashSet<String>(catList);
    Set<PageTypeReference> filteredPTset = new HashSet<PageTypeReference>();
    for (PageTypeReference pageTypeRef : getAllPageTypeRefs()) {
        List<String> categories = pageTypeRef.getCategories();
        if (categories.isEmpty()) {
            LOGGER.warn("getPageTypeRefsForCategories: skip pageTypeRef [" + pageTypeRef
                    + "] because no categories found!");
        } else if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("getPageTypeRefsForCategories: test [" + pageTypeRef + "] for categories ["
                    + Arrays.deepToString(categories.toArray()) + "] size [" + categories.size() + "].");
        }
        for (String category : pageTypeRef.getCategories()) {
            if (catList.contains(category)) {
                filteredPTset.add(pageTypeRef);
                LOGGER.trace("getPageTypeRefsForCategories: added [" + pageTypeRef + "] with category ["
                        + category + "].");
            } else {
                LOGGER.trace("getPageTypeRefsForCategories: skip [" + pageTypeRef + "] with category ["
                        + category + "].");
            }
        }
    }
    LOGGER.debug("getPageTypeRefsForCategories: for catList [" + Arrays.deepToString(catList.toArray())
            + "] return " + Arrays.deepToString(filteredPTset.toArray()));
    return filteredPTset;
}

From source file:com.celements.menu.MenuService.java

void addMenuHeaders(SortedMap<Integer, BaseObject> menuHeadersMap) {
    try {
        List<String> result = queryManager.createQuery(getHeadersXWQL(), Query.XWQL).execute();
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("addMenuHeaders received for " + getContext().getDatabase() + ": "
                    + Arrays.deepToString(result.toArray()));
        }
        for (String fullName : new HashSet<String>(result)) {
            DocumentReference menuBarDocRef = webUtilsService.resolveDocumentReference(fullName);
            if (hasview(menuBarDocRef)) {
                List<BaseObject> headerObjList = getContext().getWiki().getDocument(menuBarDocRef, getContext())
                        .getXObjects(getMenuBarHeaderClassRef(menuBarDocRef.getWikiReference().getName()));
                LOGGER.trace("addMenuHeaders: hasview for ["
                        + webUtilsService.getRefDefaultSerializer().serialize(menuBarDocRef)
                        + "] adding items [" + ((headerObjList != null) ? headerObjList.size() : "null") + ".");
                if (headerObjList != null) {
                    for (BaseObject obj : headerObjList) {
                        menuHeadersMap.put(obj.getIntValue("pos"), obj);
                    }
                }
            } else {
                LOGGER.trace("addMenuHeaders: NO hasview for ["
                        + webUtilsService.getRefDefaultSerializer().serialize(menuBarDocRef) + "].");
            }
        }
    } catch (XWikiException e) {
        LOGGER.error(e);
    } catch (QueryException e) {
        LOGGER.error(e);
    }
}

From source file:es.csic.iiia.planes.util.InverseWishartDistribution.java

private RealMatrix sampleWishart() {
    final int dim = scaleMatrix.getColumnDimension();

    // Build N_{ij}
    double[][] N = new double[dim][dim];
    for (int j = 0; j < dim; j++) {
        for (int i = 0; i < j; i++) {
            N[i][j] = random.nextGaussian();
        }
    }
    if (LOG.isLoggable(Level.FINEST)) {
        LOG.log(Level.FINEST, "N = {0}", Arrays.deepToString(N));
    }

    // Build V_j
    double[] V = new double[dim];
    for (int i = 0; i < dim; i++) {
        V[i] = gammas[i].sample();
    }
    if (LOG.isLoggable(Level.FINEST)) {
        LOG.log(Level.FINEST, "V = {0}", Arrays.toString(V));
    }

    // Build B
    double[][] B = new double[dim][dim];

    // b_{11} = V_1 (first j, where sum = 0 because i == j and the inner
    //               loop is never entered).
    // b_{jj} = V_j + \sum_{i=1}^{j-1} N_{ij}^2, j = 2, 3, ..., p
    for (int j = 0; j < dim; j++) {
        double sum = 0;
        for (int i = 0; i < j; i++) {
            sum += Math.pow(N[i][j], 2);
        }
        B[j][j] = V[j] + sum;
    }
    if (LOG.isLoggable(Level.FINEST)) {
        LOG.log(Level.FINEST, "B*_jj : = {0}", Arrays.deepToString(B));
    }

    // b_{1j} = N_{1j} * \sqrt V_1
    for (int j = 1; j < dim; j++) {
        B[0][j] = N[0][j] * Math.sqrt(V[0]);
        B[j][0] = B[0][j];
    }
    if (LOG.isLoggable(Level.FINEST)) {
        LOG.log(Level.FINEST, "B*_1j = {0}", Arrays.deepToString(B));
    }

    // b_{ij} = N_{ij} * \sqrt V_i + \sum_{k=1}^{i-1} N_{ki}*N_{kj}
    for (int j = 1; j < dim; j++) {
        for (int i = 1; i < j; i++) {
            double sum = 0;
            for (int k = 0; k < i; k++) {
                sum += N[k][i] * N[k][j];
            }
            B[i][j] = N[i][j] * Math.sqrt(V[i]) + sum;
            B[j][i] = B[i][j];
        }
    }
    if (LOG.isLoggable(Level.FINEST)) {
        LOG.log(Level.FINEST, "B* = {0}", Arrays.deepToString(B));
    }

    RealMatrix BMat = new Array2DRowRealMatrix(B);
    RealMatrix A = cholesky.getL().multiply(BMat).multiply(cholesky.getLT());
    if (LOG.isLoggable(Level.FINER)) {
        LOG.log(Level.FINER, "A* = {0}", Arrays.deepToString(A.getData()));
    }
    A = A.scalarMultiply(1 / df);
    return A;
}

From source file:com.joshdrummond.webpasswordsafe.android.GetCurrentPassword.java

private String parseResponse(String responseSOAP) {
    String response = "";
    try {
        SAXParserFactory spf = SAXParserFactory.newInstance();
        SAXParser sp = spf.newSAXParser();
        XMLReader xr = sp.getXMLReader();
        GetCurrentPasswordHandler handler = new GetCurrentPasswordHandler();
        xr.setContentHandler(handler);
        xr.parse(new InputSource(new StringReader(responseSOAP)));
        response = handler.getParsedData();
    } catch (Exception e) {
        response = "ERROR parsing: " + Arrays.deepToString(e.getStackTrace());
    }
    return response;
}

From source file:org.batoo.jpa.jdbc.dbutils.QueryRunner.java

/**
 * Throws a new exception with a more informative error message.
 *
 * @param cause
 *            The original exception that will be chained to the new exception when it's rethrown.
 * 
 * @param sql
 *            The query that was executing when the exception happened.
 * 
 * @param params
 *            The query replacement parameters; <code>null</code> is a valid value to pass in.
 * @return a new SQLException whose message includes the original message, the failing query and its replacement parameters
 */
private SQLException convertSqlException(SQLException cause, String sql, Object... params) {
    String causeMessage = cause.getMessage();
    if (causeMessage == null) {
        causeMessage = "";
    }

    final StringBuffer msg = new StringBuffer(causeMessage);

    msg.append(" Query: ");
    msg.append(sql);
    msg.append(" Parameters: ");

    if (params == null) {
        msg.append("[]");
    } else {
        msg.append(Arrays.deepToString(params));
    }

    final SQLException e = new SQLException(msg.toString(), cause.getSQLState(), cause.getErrorCode());
    e.setNextException(cause);

    return e;
}

From source file:de.kopis.glacier.parsers.GlacierUploaderOptionParserTest.java

@Test
public void hasActionOptionDownload() {
    final String[] newArgs = Arrays.copyOf(args, args.length + 2);
    newArgs[newArgs.length - 2] = "--download";
    newArgs[newArgs.length - 1] = "myarchiveid";

    final OptionSet optionSet = optionsParser.parse(newArgs);
    assertTrue("Option 'download' not found in " + Arrays.deepToString(optionSet.specs().toArray()),
            optionSet.has("download"));
    assertEquals("Value of option 'download' not found in " + Arrays.deepToString(optionSet.specs().toArray()),
            "myarchiveid", optionSet.valueOf("download"));
}

From source file:com.joyent.manta.http.ApacheHttpHeaderUtils.java

/**
 * In order to be sure we're continuing to download the same object we need to extract the {@code ETag} and {@code
 * Content-Range} headers from the response. Either header missing is an error. Additionally, when the {@code
 * Content-Range} header is present the specified range should be equal to the response's {@code Content-Length}.
 * If the {@code Content-Range} header is missing and {@code allowContentRangeInference} is true, we may infer the
 * response code was 200 and construct a representative {@code Content-Range} from byte offset 0 to
 * {@code Content-Length - 1}.
 *
 * @param response the response to check for headers
 * @param allowContentRangeInference whether or not we can derive a {@link HttpRange.Response} from the
 * {@code Content-Length} header instead of only using it for verification.
 * @return the request headers we're concerned with validating
 * @throws ProtocolException when the headers are malformed, unparseable, or the {@code
 * Content-Range} and {@code Content-Length} are mismatched
 */
static Pair<String, HttpRange.Response> extractDownloadResponseFingerprint(final HttpResponse response,
        final boolean allowContentRangeInference) throws ProtocolException {

    final String etag = extractSingleHeaderValue(response, ETAG, true);

    final long contentLength;
    try {
        final String rawContentLength = extractSingleHeaderValue(response, CONTENT_LENGTH, true);
        // since we're passing required=true, a ProtocolException would already have been thrown, and
        // @SuppressWarnings("ConstantConditions") is too blunt a hammer and would apply to the whole method, so...
        // noinspection ConstantConditions
        contentLength = Long.parseUnsignedLong(rawContentLength);
    } catch (final NumberFormatException e) {
        throw new ProtocolException(
                String.format("Failed to parse Content-Length response, matching headers: %s",
                        Arrays.deepToString(response.getHeaders(CONTENT_LENGTH))));
    }

    final String rawContentRange = extractSingleHeaderValue(response, CONTENT_RANGE, false);

    if (StringUtils.isBlank(rawContentRange)) {
        if (!allowContentRangeInference) {
            throw new ProtocolException("Content-Range header required but missing.");
        }

        // the entire object is being requested
        return new ImmutablePair<>(etag, new HttpRange.Response(0, contentLength - 1, contentLength));
    }

    final HttpRange.Response contentRange = parseContentRange(rawContentRange);

    // Manta follows the spec and sends the Content-Length of the range, which we should ensure matches
    if (contentRange.contentLength() != contentLength) {
        throw new ProtocolException(String.format(
                "Content-Range start-to-end size and Content-Length mismatch: expected [%d], got [%d]",
                contentRange.contentLength(), contentLength));
    }

    return new ImmutablePair<>(etag, contentRange);
}

From source file:com.google.uzaygezen.core.hbase.HBaseQueryTest.java

/**
 * With more than 62 bits (using {@link BigInteger} rather than plain
 * {@link Long}) and without any cached rollup version of the data
 * ({@link BoundedRollup}), this way of building the queries is likely to be
 * quite slow, but it shows off the capability to perform queries over
 * non-cached arbitrary-precision data.
 */
@Test
public void queryHBase() throws IOException, InterruptedException {
    MockHTable table = MockHTable.create();
    final byte[] family = "FAMILY".getBytes(Charsets.ISO_8859_1);
    /*
     * We choose not to store the coordinates themselves, since storing the
     * Hilbert index is sufficient to recover the coordinate values. So let's
     * use a dummy column.
     */
    final byte[][] qualifiers = { "NICE".getBytes(Charsets.ISO_8859_1), };
    MultiDimensionalSpec spec = new MultiDimensionalSpec(Ints.asList(30, 10, 25));
    // Add some data.
    Random rnd = new Random(TestUtils.SEED);
    int[][] data = generateData(spec, 1 << 16, rnd);
    SpaceFillingCurve sfc = new CompactHilbertCurve(spec);
    logger.log(Level.INFO, "Populating table with up to {0} rows.", data.length);
    populateTable(family, qualifiers, spec, data, sfc, table);
    int cacheSize = 1 << 8;
    logger.log(Level.INFO, "Building cache of size {0}.", cacheSize);
    // The cache is optional.
    Map<Pow2LengthBitSetRange, NodeValue<BigIntegerContent>> rolledupMap = createRolledupCache(table, spec, sfc,
            cacheSize);
    logger.log(Level.INFO, "Constructed cache of actual size {0}.", rolledupMap.size());
    for (int trial = 0; trial < 1; ++trial) {
        logger.log(Level.INFO, "trial={0}", trial);
        int[] maxLengthPerDimension = new int[spec.getBitsPerDimension().size()];
        for (boolean useCache : new boolean[] { false, true }) {
            int m = useCache ? 256 : 32;
            /*
             * For testing purposes limit the range size to m values for each
             * dimension to speed up query computation. In practice, query volume
             * should be enforced to be small, and when a certain query volume is
             * exceeded, a full table scan will probably be faster anyway.
             */
            Arrays.fill(maxLengthPerDimension, m);
            int[][] ranges = generateRanges(spec, maxLengthPerDimension, rnd);
            logger.log(Level.INFO, "ranges={0}", Arrays.deepToString(ranges));
            // Limit the maximum number of ranges.
            int maxRanges = 1 + rnd.nextInt(32);
            List<int[]> actual = queryAndFilter(table, spec, sfc, ranges, maxRanges,
                    useCache ? rolledupMap : null);
            List<int[]> expected = uniq(fullScanQuery(data, sfc, ranges));
            logger.log(Level.INFO, "expected.size()={0}", expected.size());
            Assert.assertEquals(expected.size(), actual.size());
            for (int i = 0; i < expected.size(); ++i) {
                Assert.assertArrayEquals(expected.get(i), actual.get(i));
            }
        }
    }
}

From source file:org.wso2.carbon.event.processor.common.storm.component.SiddhiBolt.java

@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
    if (siddhiManager == null) {
        init();
    }
    inputThroughputProbe.update();

    try {
        this.collector = collector;
        InputHandler inputHandler = executionPlanRuntime.getInputHandler(tuple.getSourceStreamId());
        Object[] dataArray = tuple.getValues().toArray();
        long timestamp = (Long) dataArray[dataArray.length - 1];
        dataArray = ArrayUtils.remove(dataArray, dataArray.length - 1);

        if (log.isDebugEnabled()) {
            log.debug(logPrefix + "Received Event: " + tuple.getSourceStreamId() + ":"
                    + Arrays.deepToString(dataArray) + "@" + timestamp);
        }

        if (inputHandler != null) {
            inputHandler.send(timestamp, dataArray);
        } else {
            log.warn(logPrefix + "Event received for unknown stream " + tuple.getSourceStreamId()
                    + ". Discarding" + " the Event: " + tuple.getSourceStreamId() + ":"
                    + Arrays.deepToString(dataArray) + "@" + timestamp);
        }
    } catch (InterruptedException e) {
        log.error(e);
    }
}