Example usage for java.util Collections nCopies

List of usage examples for java.util Collections nCopies

Introduction

On this page you can find example usage for java.util Collections nCopies.

Prototype

public static <T> List<T> nCopies(int n, T o) 

Document

Returns an immutable list consisting of n copies of the specified object.
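
As a quick illustration of that contract, here is a minimal sketch (class name and values chosen arbitrarily) showing that the returned list has exactly n elements, that every element is the same reference, and that the list rejects modification:

import java.util.Collections;
import java.util.List;

public class NCopiesDemo {
    public static void main(String[] args) {
        // Five references to the same one-character string, backed by an immutable list.
        List<String> dashes = Collections.nCopies(5, "-");

        System.out.println(dashes.size());           // 5
        System.out.println(dashes);                  // [-, -, -, -, -]
        System.out.println(String.join("", dashes)); // -----

        try {
            dashes.add("x");
        } catch (UnsupportedOperationException e) {
            // nCopies returns an immutable list, so structural changes fail.
            System.out.println("list is immutable");
        }
    }
}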

Usage

From source file: com.oneops.boo.BooConfigInterpolator.java

private String file(String path, boolean keepNewlines, int numWhitespaceToBePrepend) {
    if (path.startsWith("~")) {
        path = path.replace("~", HOME);
    } else if (path.startsWith("@")) {
        path = path.substring(1);
    } else if (path.startsWith("./")) {
        path = path.replace("./", String.format("%s%s", WORK, File.separator));
    }
    try {
        final String contents = keepNewlines ? readFileToString(new File(path))
                : FileUtils.readFileToString(new File(path), StandardCharsets.UTF_8);
        if (numWhitespaceToBePrepend > 0) {
            final String lines[] = contents.split("\\r?\\n");
            final StringBuilder sb = new StringBuilder();
            final String whitespaces = String.join("", Collections.nCopies(numWhitespaceToBePrepend, " "));
            boolean isFirstLineVisited = false;
            for (String line : lines) {
                if (isFirstLineVisited) {
                    sb.append(whitespaces).append(line).append('\n');
                } else {
                    sb.append(line).append('\n');
                    isFirstLineVisited = true;
                }
            }
            return sb.toString();
        } else {
            return contents;
        }

    } catch (IOException e) {
        // Content that might be required for the compute to function may be omitted, so just fail
        // fast. If it's an SSH public key that is meant to be injected and it doesn't work, it will
        // result in a compute you can't log in to.
        throw new RuntimeException(String.format("%s cannot be found or cannot be read.", path));
    }
}
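
The nCopies call in this example is doing the work of a "repeat a string n times" helper: joining n copies of a single space yields the indentation prefix. Extracted on its own (the method name here is illustrative, not part of the original class), the idiom looks like this:

import java.util.Collections;

public class Indent {
    // Builds a padding string of n spaces; on Java 11+ " ".repeat(n) does the same thing.
    static String spaces(int n) {
        return String.join("", Collections.nCopies(n, " "));
    }

    public static void main(String[] args) {
        System.out.println("[" + spaces(4) + "]"); // [    ]
    }
}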

From source file: org.kuali.student.enrollment.class2.courseoffering.service.impl.TestCourseOfferingCodeGeneratorImpl.java

private void test(final String courseOfferingCode, final int threadCount)
        throws InterruptedException, ExecutionException {

    Callable<String> task = new Callable<String>() {
        @Override
        public String call() {
            return offeringCodeGenerator
                    .generateActivityOfferingCode(_makeDefaultMap(courseOfferingCode, new ArrayList<String>()));
        }
    };

    List<Callable<String>> tasks = Collections.nCopies(threadCount, task);
    ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
    List<Future<String>> futures = executorService.invokeAll(tasks);
    List<String> resultList = new ArrayList<String>(futures.size());

    // Check for exceptions
    for (Future<String> future : futures) {
        // Throws an exception if an exception was thrown by the task.
        resultList.add(future.get());
    }

    // Validate the IDs
    assert (futures.size() == threadCount);

    List<String> expectedList = new ArrayList<String>(threadCount);
    String nextCode = "";
    for (long i = 1; i <= threadCount; i++) {
        nextCode = getNextCode(nextCode);
        expectedList.add(nextCode);
    }

    Collections.sort(resultList);
    System.out.println("Expected/Got: \n" + expectedList + "\n" + resultList);

    for (int i = 0; i < resultList.size(); i++) {
        assertTrue("\nWas expecting \n[" + expectedList.get(i) + "] but got \n[" + resultList.get(i) + "]\n",
                expectedList.get(i).equals(resultList.get(i)));
    }
}

From source file: com.networknt.client.ClientTest.java

private void callApiSyncMultiThread(final int threadCount) throws InterruptedException, ExecutionException {
    Callable<String> task = new Callable<String>() {
        @Override
        public String call() throws Exception {
            return callApiSync();
        }
    };
    List<Callable<String>> tasks = Collections.nCopies(threadCount, task);
    long start = System.currentTimeMillis();
    ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
    List<Future<String>> futures = executorService.invokeAll(tasks);
    List<String> resultList = new ArrayList<String>(futures.size());
    // Check for exceptions
    for (Future<String> future : futures) {
        // Throws an exception if an exception was thrown by the task.
        resultList.add(future.get());
    }
    long last = (System.currentTimeMillis() - start);
    System.out.println("resultList = " + resultList + " response time = " + last);
}
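
This test and the Kuali test above share the same concurrency pattern: Collections.nCopies(threadCount, task) produces a list containing threadCount references to a single Callable, and ExecutorService.invokeAll runs that identical task once per entry. Stripped of the test-specific work (the body of call() below is a stand-in), the pattern is roughly:

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class RepeatTaskDemo {
    public static void main(String[] args) throws Exception {
        final int threadCount = 4;

        // One task object, listed threadCount times; nCopies does not clone it.
        Callable<String> task = () -> "ran on " + Thread.currentThread().getName();
        List<Callable<String>> tasks = Collections.nCopies(threadCount, task);

        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        try {
            // invokeAll blocks until all copies have completed.
            for (Future<String> future : executorService.invokeAll(tasks)) {
                System.out.println(future.get()); // rethrows any exception from the task
            }
        } finally {
            executorService.shutdown();
        }
    }
}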

From source file: org.mule.module.blink1.Blink1Module.java

/**
 * Clear the pattern stored in a blink(1) device.
 * <p/>
 * {@sample.xml ../../../doc/blink1-connector.xml.sample blink1:clear-pattern}
 * 
 * @param deviceId unique identifier of the device.
 */
@Processor
public void clearPattern(final int deviceId) {
    final List<PatternEntry> entries = Collections.nCopies(MAX_PATTERN_POSITION + 1,
            new PatternEntry(0, "black"));
    storePattern(deviceId, entries, 0);
}

From source file: au.id.tmm.anewreader.model.Model.java

/**
 * For a list of item ids, returns the corresponding Item objects. These are retrieved from the
 * ItemCache if possible, otherwise they are retrieved from the api.
 */
private List<Item> getItemsFromIds(List<String> itemIds, ReadStatus readStatus) throws IOException {

    // Essentially the process here is to identify which items are not available through the
    // ItemCache, and to then retrieve these items in-bulk from the api.

    // Create a list of the same size as the list of item ids in which we will place the
    // Item objects.
    List<Item> returnedItems = new ArrayList<Item>(Collections.nCopies(itemIds.size(), (Item) null));

    // A map holding the ids of uncached items and their indexes in the returnedItems list.
    Map<String, Integer> idsAndIndexesOfUncachedItems = new HashMap<String, Integer>();

    for (int i = 0; i < itemIds.size(); i++) {
        Item cachedItemForThisId = this.itemCache.get(itemIds.get(i));

        if (cachedItemForThisId == null) {
            idsAndIndexesOfUncachedItems.put(itemIds.get(i), i);
        } else {
            cachedItemForThisId.setReadStatus(readStatus);
            returnedItems.set(i, cachedItemForThisId);
        }
    }

    List<Item> itemsNotFoundInCache = this
            .getItemsFromApi(new ArrayList<String>(idsAndIndexesOfUncachedItems.keySet()), readStatus);

    for (Item currentItem : itemsNotFoundInCache) {
        returnedItems.set(idsAndIndexesOfUncachedItems.get(currentItem.getId()), currentItem);
    }

    return returnedItems;
}
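
The first line of this method shows a second common use of nCopies: wrapping it in the ArrayList constructor produces a mutable list that already has the right size, so later code can fill slots with set(index, value) instead of appending with add. A minimal sketch of just that idiom:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class PresizedListDemo {
    public static void main(String[] args) {
        // A mutable list with three null slots; set(...) is legal at any of them.
        List<String> slots = new ArrayList<String>(Collections.nCopies(3, (String) null));
        slots.set(1, "middle");
        System.out.println(slots); // [null, middle, null]
    }
}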

From source file: edu.stanford.slac.archiverappliance.PB.data.StatusSeverityTest.java

private DBR getJCASampleValue(ArchDBRTypes type, int value, int severity, int status) {
    switch (type) {
    case DBR_SCALAR_STRING:
        DBR_TIME_String retvalss = new DBR_TIME_String(new String[] { Integer.toString(value) });
        retvalss.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalss.setSeverity(severity);
        retvalss.setStatus(status);
        return retvalss;
    case DBR_SCALAR_SHORT:
        DBR_TIME_Short retvalsh;
        if (0 <= value && value < 1000) {
            // Check for some numbers around the minimum value
            retvalsh = new DBR_TIME_Short(new short[] { (short) (Short.MIN_VALUE + value) });
        } else if (1000 <= value && value < 2000) {
            // Check for some numbers around the maximum value
            retvalsh = new DBR_TIME_Short(new short[] { (short) (Short.MAX_VALUE - (value - 1000)) });
        } else {
            // Check for some numbers around 0
            retvalsh = new DBR_TIME_Short(new short[] { (short) (value - 2000) });
        }
        retvalsh.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalsh.setSeverity(severity);
        retvalsh.setStatus(status);
        return retvalsh;
    case DBR_SCALAR_FLOAT:
        DBR_TIME_Float retvalfl;
        if (0 <= value && value < 1000) {
            // Check for some numbers around the minimum value
            retvalfl = new DBR_TIME_Float(new float[] { Float.MIN_VALUE + value });
        } else if (1000 <= value && value < 2000) {
            // Check for some numbers around the maximum value
            retvalfl = new DBR_TIME_Float(new float[] { Float.MAX_VALUE - (value - 1000) });
        } else {
            // Check for some numbers around 0. Divide by a large number to make sure we cater to the number of precision digits
            retvalfl = new DBR_TIME_Float(new float[] { (value - 2000.0f) / value });
        }
        retvalfl.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalfl.setSeverity(severity);
        retvalfl.setStatus(status);
        return retvalfl;
    case DBR_SCALAR_ENUM:
        DBR_TIME_Enum retvalen;
        retvalen = new DBR_TIME_Enum(new short[] { (short) (value) });
        retvalen.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalen.setSeverity(severity);
        retvalen.setStatus(status);
        return retvalen;
    case DBR_SCALAR_BYTE:
        DBR_TIME_Byte retvalby;
        retvalby = new DBR_TIME_Byte(new byte[] { ((byte) (value % 255)) });
        retvalby.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalby.setSeverity(severity);
        retvalby.setStatus(status);
        return retvalby;
    case DBR_SCALAR_INT:
        DBR_TIME_Int retvalint;
        if (0 <= value && value < 1000) {
            // Check for some numbers around the minimum value
            retvalint = new DBR_TIME_Int(new int[] { Integer.MIN_VALUE + value });
        } else if (1000 <= value && value < 2000) {
            // Check for some numbers around the maximum value
            retvalint = new DBR_TIME_Int(new int[] { Integer.MAX_VALUE - (value - 1000) });
        } else {
            // Check for some numbers around 0
            retvalint = new DBR_TIME_Int(new int[] { (value - 2000) });
        }
        retvalint.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvalint.setSeverity(severity);
        retvalint.setStatus(status);
        return retvalint;
    case DBR_SCALAR_DOUBLE:
        DBR_TIME_Double retvaldb;
        if (0 <= value && value < 1000) {
            // Check for some numbers around the minimum value
            retvaldb = new DBR_TIME_Double(new double[] { (Double.MIN_VALUE + value) });
        } else if (1000 <= value && value < 2000) {
            // Check for some numbers around the maximum value
            retvaldb = new DBR_TIME_Double(new double[] { (Double.MAX_VALUE - (value - 1000)) });
        } else {
            // Check for some numbers around 0. Divide by a large number to make sure we cater to the number of precision digits
            retvaldb = new DBR_TIME_Double(new double[] { ((value - 2000.0) / (value * 1000000)) });
        }
        retvaldb.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvaldb.setSeverity(severity);
        retvaldb.setStatus(status);
        return retvaldb;
    case DBR_WAVEFORM_STRING:
        DBR_TIME_String retvst;
        // Varying number of copies of a typical value
        retvst = new DBR_TIME_String(
                Collections.nCopies(value, Integer.toString(value)).toArray(new String[0]));
        retvst.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvst.setSeverity(severity);
        retvst.setStatus(status);
        return retvst;
    case DBR_WAVEFORM_SHORT:
        DBR_TIME_Short retvsh;
        retvsh = new DBR_TIME_Short(
                ArrayUtils.toPrimitive(Collections.nCopies(1, (short) value).toArray(new Short[0])));
        retvsh.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvsh.setSeverity(severity);
        retvsh.setStatus(status);
        return retvsh;
    case DBR_WAVEFORM_FLOAT:
        DBR_TIME_Float retvf;
        // Varying number of copies of a typical value
        retvf = new DBR_TIME_Float(ArrayUtils.toPrimitive(
                Collections.nCopies(value, (float) Math.cos(value * Math.PI / 3600)).toArray(new Float[0])));
        retvf.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvf.setSeverity(severity);
        retvf.setStatus(status);
        return retvf;
    case DBR_WAVEFORM_ENUM:
        DBR_TIME_Enum retven;
        retven = new DBR_TIME_Enum(
                ArrayUtils.toPrimitive(Collections.nCopies(1024, (short) value).toArray(new Short[0])));
        retven.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retven.setSeverity(severity);
        retven.setStatus(status);
        return retven;
    case DBR_WAVEFORM_BYTE:
        DBR_TIME_Byte retvb;
        // Large number of elements in the array
        retvb = new DBR_TIME_Byte(ArrayUtils
                .toPrimitive(Collections.nCopies(65536 * value, ((byte) (value % 255))).toArray(new Byte[0])));
        retvb.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvb.setSeverity(severity);
        retvb.setStatus(status);
        return retvb;
    case DBR_WAVEFORM_INT:
        DBR_TIME_Int retvint;
        // Varying number of copies of a typical value
        retvint = new DBR_TIME_Int(
                ArrayUtils.toPrimitive(Collections.nCopies(value, value * value).toArray(new Integer[0])));
        retvint.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvint.setSeverity(severity);
        retvint.setStatus(status);
        return retvint;
    case DBR_WAVEFORM_DOUBLE:
        DBR_TIME_Double retvd;
        // Varying number of copies of a typical value
        retvd = new DBR_TIME_Double(ArrayUtils.toPrimitive(
                Collections.nCopies(value, Math.sin(value * Math.PI / 3600)).toArray(new Double[0])));
        retvd.setTimeStamp(convertSecondsIntoYear2JCATimeStamp(value));
        retvd.setSeverity(severity);
        retvd.setStatus(status);
        return retvd;
    case DBR_V4_GENERIC_BYTES:
        throw new RuntimeException("Currently don't support " + type + " when generating sample data");
    default:
        throw new RuntimeException("We seemed to have missed a DBR type when generating sample data");
    }
}
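
All of the waveform branches above follow one recipe: build an nCopies list of a boxed value, turn it into an array with toArray, and, for primitive element types, unbox with Commons Lang's ArrayUtils.toPrimitive. The reference-type half of that recipe needs only the JDK, as in this small sketch:

import java.util.Collections;

public class NCopiesToArrayDemo {
    public static void main(String[] args) {
        // An array of eight identical strings, via nCopies + toArray.
        String[] eightCopies = Collections.nCopies(8, "42").toArray(new String[0]);
        System.out.println(eightCopies.length); // 8
    }
}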

From source file: com.streamsets.pipeline.stage.destination.cassandra.CassandraTarget.java

@Override
protected List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    errorRecordHandler = new DefaultErrorRecordHandler(getContext());

    Target.Context context = getContext();
    if (addresses.isEmpty()) {
        issues.add(context.createConfigIssue(Groups.CASSANDRA.name(), "contactNodes", Errors.CASSANDRA_00));
    }

    for (String address : addresses) {
        if (address.isEmpty()) {
            issues.add(context.createConfigIssue(Groups.CASSANDRA.name(), "contactNodes", Errors.CASSANDRA_01));
        }
    }

    contactPoints = new ArrayList<>(addresses.size());
    for (String address : addresses) {
        if (null == address) {
            LOG.warn("A null value was passed in as a contact point.");
            // This isn't valid but InetAddress won't complain so we skip this entry.
            continue;
        }

        try {
            contactPoints.add(InetAddress.getByName(address));
        } catch (UnknownHostException e) {
            issues.add(context.createConfigIssue(Groups.CASSANDRA.name(), "contactNodes", Errors.CASSANDRA_04,
                    address));
        }
    }

    if (contactPoints.size() < 1) {
        issues.add(context.createConfigIssue(Groups.CASSANDRA.name(), "contactNodes", Errors.CASSANDRA_00));
    }

    if (!qualifiedTableName.contains(".")) {
        issues.add(
                context.createConfigIssue(Groups.CASSANDRA.name(), "qualifiedTableName", Errors.CASSANDRA_02));
    } else {
        if (checkCassandraReachable(issues)) {
            List<String> invalidColumns = checkColumnMappings();
            if (invalidColumns.size() != 0) {
                issues.add(context.createConfigIssue(Groups.CASSANDRA.name(), "columnNames",
                        Errors.CASSANDRA_08, Joiner.on(", ").join(invalidColumns)));
            }
        }
    }

    if (issues.isEmpty()) {
        cluster = Cluster.builder().addContactPoints(contactPoints).withCompression(compression).withPort(port)
                // If authentication is disabled on the C* cluster, this method has no effect.
                .withCredentials(username, password).build();

        try {
            session = cluster.connect();

            statementCache = CacheBuilder.newBuilder()
                    // No expiration as prepared statements are good for the entire session.
                    .build(new CacheLoader<SortedSet<String>, PreparedStatement>() {
                        @Override
                        public PreparedStatement load(SortedSet<String> columns) {
                            // The INSERT query we're going to perform (parameterized).
                            SortedSet<String> statementColumns = new TreeSet<>();
                            for (String fieldPath : columnMappings.keySet()) {
                                final String fieldName = fieldPath.replaceAll("/", "");
                                if (columns.contains(fieldName)) {
                                    statementColumns.add(fieldName);
                                }
                            }
                            final String query = String.format("INSERT INTO %s (%s) VALUES (%s);",
                                    qualifiedTableName, Joiner.on(", ").join(statementColumns),
                                    Joiner.on(", ").join(Collections.nCopies(statementColumns.size(), "?")));
                            LOG.trace("Prepared Query: {}", query);
                            return session.prepare(query);
                        }
                    });
        } catch (NoHostAvailableException | AuthenticationException | IllegalStateException e) {
            issues.add(context.createConfigIssue(null, null, Errors.CASSANDRA_03, e.toString()));
        }
    }
    return issues;
}
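
Inside the cache loader, nCopies supplies one "?" placeholder per column for the parameterized INSERT, joined with Guava's Joiner. The same trick works with plain String.join; the table and column names below are made up for illustration:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class PlaceholderDemo {
    public static void main(String[] args) {
        List<String> columns = Arrays.asList("id", "name", "email");
        String placeholders = String.join(", ", Collections.nCopies(columns.size(), "?"));
        String query = String.format("INSERT INTO users (%s) VALUES (%s);",
                String.join(", ", columns), placeholders);
        System.out.println(query); // INSERT INTO users (id, name, email) VALUES (?, ?, ?);
    }
}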

From source file: org.modmine.web.HeatMapController.java

private String getJSONString(Model model, InterMineBag bag, PathQueryExecutor executor, String expressionType,
        String conditionType) {

    String expressionScoreJSON = null;

    // Key: gene symbol or PID - Value: list of ExpressionScore objs
    Map<String, List<ExpressionScore>> expressionScoreMap = new LinkedHashMap<String, List<ExpressionScore>>();

    PathQuery query = new PathQuery(model);
    query = queryExpressionScore(bag, conditionType, query);

    ExportResultsIterator result = executor.execute(query);
    LOG.debug("GGS QUERY: -->" + query + "<--");

    List<String> conditions = getConditionsList(conditionType);

    while (result.hasNext()) {
        List<ResultElement> row = result.next();

        String id = (String) row.get(0).getField();
        String symbol = (String) row.get(1).getField();
        Double score = (Double) row.get(2).getField();
        String condition = (String) row.get(3).getField();
        //            String dCCid = (String) row.get(4).getField();

        if (symbol == null) {
            symbol = id;
        }
        // should be fine with release 4.2 of canvasxpress
        //            symbol = fixSymbol(symbol);

        if (!expressionScoreMap.containsKey(symbol)) {
            // Create a list with space for n (size of conditions) ExpressionScore
            List<ExpressionScore> expressionScoreList = new ArrayList<ExpressionScore>(
                    Collections.nCopies(conditions.size(), new ExpressionScore()));
            ExpressionScore aScore = new ExpressionScore(condition, score, id, symbol);

            expressionScoreList.set(conditions.indexOf(condition), aScore);
            expressionScoreMap.put(symbol, expressionScoreList);

        } else {
            ExpressionScore aScore = new ExpressionScore(condition, score, id, symbol);
            expressionScoreMap.get(symbol).set(conditions.indexOf(condition), aScore);
        }
    }

    expressionScoreJSON = parseToJSON(StringUtils.capitalize(conditionType), expressionScoreMap);

    return expressionScoreJSON;

}

From source file: de.bund.bfr.knime.pmm.common.chart.Plotable.java

public double[][] getPoints(String paramX, String paramY, String unitX, String unitY, String transformX,
        String transformY, Map<String, Integer> choice) throws ConvertException {
    List<Double> xList = valueLists.get(paramX);
    List<Double> yList = valueLists.get(paramY);

    if (xList == null || yList == null) {
        return null;
    }

    List<Boolean> usedPoints = new ArrayList<>(Collections.nCopies(xList.size(), true));

    if (type == BOTH_STRICT || type == DATASET_STRICT) {
        for (String arg : functionArguments.keySet()) {
            if (!arg.equals(paramX) && valueLists.containsKey(arg)) {
                Double fixedValue = functionArguments.get(arg).get(choice.get(arg));
                List<Double> values = valueLists.get(arg);

                for (int i = 0; i < values.size(); i++) {
                    if (!fixedValue.equals(values.get(i))) {
                        usedPoints.set(i, false);
                    }
                }
            }
        }

        if (!usedPoints.contains(true)) {
            return null;
        }
    }

    List<Point2D.Double> points = new ArrayList<>(xList.size());

    for (int i = 0; i < xList.size(); i++) {
        Double x = xList.get(i);
        Double y = yList.get(i);

        if (x != null) {
            x = convertToUnit(paramX, x, unitX);
            x = transform(x, transformX);
        }

        if (y != null) {
            y = convertToUnit(paramY, y, unitY);
            y = transform(y, transformY);
        }

        if (usedPoints.get(i) && isValidValue(x) && isValidValue(y)) {
            points.add(new Point2D.Double(x, y));
        }
    }

    Collections.sort(points, new Comparator<Point2D.Double>() {

        @Override
        public int compare(Point2D.Double p1, Point2D.Double p2) {
            return Double.compare(p1.x, p2.x);
        }
    });

    double[][] pointsArray = new double[2][points.size()];

    for (int i = 0; i < points.size(); i++) {
        pointsArray[0][i] = points.get(i).x;
        pointsArray[1][i] = points.get(i).y;
    }

    return pointsArray;
}

From source file: org.fineract.module.stellar.TestPaymentInSimpleNetwork.java

@Test
public void paymentSumApproachesCreditLimit() throws Exception {
    logger.info("paymentSumApproachesCreditLimit test begin");

    final BigDecimal transferIncrement = BigDecimal.valueOf(99.99);
    final BigDecimal lastBit = BigDecimal.valueOf(0.1);

    final AccountListener accountListener = new AccountListener(serverAddress, firstTenantId, secondTenantId);

    //Approach the credit limit, then go back down to zero.
    Collections.nCopies(10, transferIncrement).parallelStream()
            .forEach((transferAmount) -> makePayment(firstTenantId, firstTenantApiKey, secondTenantId,
                    ASSET_CODE, transferAmount));

    {
        final List<AccountListener.CreditMatcher> transfers = new ArrayList<>();
        transfers.addAll(Collections.nCopies(10,
                creditMatcher(secondTenantId, transferIncrement, ASSET_CODE, firstTenantId)));

        accountListener.waitForCredits(PAY_WAIT * 3, transfers);
    }

    checkBalance(secondTenantId, secondTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(secondTenantId),
            VAULT_BALANCE);

    checkBalance(secondTenantId, secondTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(firstTenantId),
            transferIncrement.multiply(BigDecimal.TEN));

    logger.info("paymentSumApproachesCreditLimit transfers back");
    Collections.nCopies(10, transferIncrement).parallelStream()
            .forEach((transferAmount) -> makePayment(secondTenantId, secondTenantApiKey, firstTenantId,
                    ASSET_CODE, transferAmount));

    {
        final List<AccountListener.CreditMatcher> transfers = new ArrayList<>();
        transfers.addAll(Collections.nCopies(10, creditMatcher(firstTenantId, transferIncrement, ASSET_CODE,
                vaultMatcher(firstTenantId, secondTenantId))));

        accountListener.waitForCredits(PAY_WAIT * 3, transfers);

        accountListener.waitForCreditsToAccumulate(PAY_WAIT * 3,
                creditMatcher(secondTenantId, transferIncrement.multiply(BigDecimal.TEN), ASSET_CODE,
                        vaultMatcher(firstTenantId, secondTenantId)));

        accountListener.waitForCreditsToAccumulate(PAY_WAIT * 3,
                creditMatcher(firstTenantId, transferIncrement.multiply(BigDecimal.TEN), ASSET_CODE,
                        vaultMatcher(firstTenantId, secondTenantId)));
    }

    checkBalance(firstTenantId, firstTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(secondTenantId),
            BigDecimal.ZERO);
    checkBalance(secondTenantId, secondTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(firstTenantId),
            BigDecimal.ZERO);

    //Approach the credit limit again, then go to exactly the credit limit
    Collections.nCopies(10, transferIncrement).parallelStream()
            .forEach((transferAmount) -> makePayment(firstTenantId, firstTenantApiKey, secondTenantId,
                    ASSET_CODE, transferAmount));
    makePayment(firstTenantId, firstTenantApiKey, secondTenantId, ASSET_CODE, lastBit);

    {
        final List<AccountListener.CreditMatcher> transfers = new ArrayList<>();
        transfers.addAll(Collections.nCopies(10,
                creditMatcher(secondTenantId, transferIncrement, ASSET_CODE, firstTenantId)));
        transfers.add(creditMatcher(secondTenantId, lastBit, ASSET_CODE, firstTenantId));

        accountListener.waitForCredits(PAY_WAIT * 3, transfers);
    }

    checkBalance(secondTenantId, secondTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(firstTenantId),
            TRUST_LIMIT);

    //Now try to go over the credit limit.
    makePayment(firstTenantId, firstTenantApiKey, secondTenantId, ASSET_CODE, lastBit);

    accountListener.waitForCredits(PAY_WAIT,
            creditMatcher(secondTenantId, lastBit, ASSET_CODE, vaultMatcher(firstTenantId, secondTenantId)));

    checkBalance(secondTenantId, secondTenantApiKey, ASSET_CODE, tenantVaultStellarAddress(firstTenantId),
            TRUST_LIMIT);

    //Zero out balance for next test.
    makePayment(secondTenantId, secondTenantApiKey, firstTenantId, ASSET_CODE, TRUST_LIMIT);
    accountListener.waitForCredits(PAY_WAIT,
            creditMatcher(firstTenantId, TRUST_LIMIT, ASSET_CODE, vaultMatcher(firstTenantId, secondTenantId)));
}
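
The nCopies(10, transferIncrement).parallelStream() calls above are a compact way to run the same action a fixed number of times, in parallel, with the repeated value already in hand. Reduced to its essentials (the payment call is replaced by a println here):

import java.math.BigDecimal;
import java.util.Collections;

public class RepeatActionDemo {
    public static void main(String[] args) {
        BigDecimal transferIncrement = BigDecimal.valueOf(99.99);

        // Ten identical amounts, processed in parallel; output order is not guaranteed.
        Collections.nCopies(10, transferIncrement).parallelStream()
                .forEach(amount -> System.out.println("paying " + amount));
    }
}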