Example usage for java.util.stream IntStream range

List of usage examples for java.util.stream IntStream range

Introduction

On this page you can find usage examples for java.util.stream.IntStream.range, collected from open-source projects.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.
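
Before the project examples below, here is a minimal, self-contained sketch of the method (the class name RangeExample is purely illustrative): because the end bound is exclusive, range(0, 5) produces the five indices 0 through 4, the usual stream replacement for an indexed for-loop.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class RangeExample {
    public static void main(String[] args) {
        // Collect the indices 0..4 into a list (endExclusive = 5 is not included)
        List<Integer> indices = IntStream.range(0, 5).boxed().collect(Collectors.toList());
        System.out.println(indices); // [0, 1, 2, 3, 4]

        // Sum of squares over 1..3: 1 + 4 + 9 = 14
        int sumOfSquares = IntStream.range(1, 4).map(i -> i * i).sum();
        System.out.println(sumOfSquares); // 14
    }
}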

Usage

From source file:com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Maps each element of this tensor through the given function, returning a new tensor.
 *
 * @param f the function to apply to each element
 * @return the mapped tensor
 */
@Nullable
public Tensor map(@Nonnull final DoubleUnaryOperator f) {
    @Nullable
    final double[] data = getData();
    Tensor tensor = new Tensor(dimensions);
    @Nonnull
    final double[] cpy = tensor.getData();
    IntStream.range(0, data.length).parallel().forEach(i -> cpy[i] = f.applyAsDouble(data[i]));
    return tensor;
}

From source file:com.yahoo.bullet.storm.FilterBoltTest.java

@Test
public void testDistribution() {
    // 101 Records will be consumed
    BulletStormConfig config = new BulletStormConfig(DistributionTest.makeConfiguration(20, 128));
    bolt = ComponentUtils.prepare(new DonableFilterBolt(101, config), collector);

    Tuple query = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
            makeAggregationQuery(DISTRIBUTION, 10, Distribution.Type.PMF, "field", null, null, null, null, 3),
            METADATA);
    bolt.execute(query);

    IntStream.range(0, 101).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
            .map(FilterBoltTest::makeRecordTuple).forEach(bolt::execute);

    Assert.assertEquals(collector.getEmittedCount(), 0);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    bolt.execute(tick);

    Assert.assertEquals(collector.getEmittedCount(), 1);

    byte[] rawData = getRawPayloadOfNthTuple(1);
    Assert.assertNotNull(rawData);

    Distribution distribution = DistributionTest.makeDistribution(config,
            makeAttributes(Distribution.Type.PMF, 3), "field", 10, null);
    distribution.combine(rawData);

    List<BulletRecord> records = distribution.getRecords();

    BulletRecord expectedA = RecordBox.get()
            .add(RANGE_FIELD, NEGATIVE_INFINITY_START + SEPARATOR + 0.0 + END_EXCLUSIVE).add(COUNT_FIELD, 0.0)
            .add(PROBABILITY_FIELD, 0.0).getRecord();
    BulletRecord expectedB = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 0.0 + SEPARATOR + 50.0 + END_EXCLUSIVE).add(COUNT_FIELD, 50.0)
            .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
    BulletRecord expectedC = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 50.0 + SEPARATOR + 100.0 + END_EXCLUSIVE).add(COUNT_FIELD, 50.0)
            .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
    BulletRecord expectedD = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 100.0 + SEPARATOR + POSITIVE_INFINITY_END).add(COUNT_FIELD, 1.0)
            .add(PROBABILITY_FIELD, 1.0 / 101).getRecord();
    Assert.assertEquals(records.get(0), expectedA);
    Assert.assertEquals(records.get(1), expectedB);
    Assert.assertEquals(records.get(2), expectedC);
    Assert.assertEquals(records.get(3), expectedD);
}

From source file:com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Maps each element of this tensor, together with its index, through the given operator, returning a new tensor.
 *
 * @param f the operator applied to each (value, index) pair
 * @return the mapped tensor
 */
@Nullable
public Tensor mapIndex(@Nonnull final TupleOperator f) {
    return new Tensor(
            Tensor.getDoubles(IntStream.range(0, length()).mapToDouble(i -> f.eval(get(i), i)), length()),
            dimensions);
}

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

@Test
public void testHandleLastSourceOffsets() throws Exception {
    SpoolDirConfigBean conf = new SpoolDirConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.spoolDir = "/";
    conf.batchSize = 10;
    conf.overrunLimit = 100;
    conf.poolingTimeoutSecs = 1;
    conf.filePattern = "file-[0-9].log";
    conf.pathMatcherMode = PathMatcherMode.GLOB;
    conf.maxSpoolFiles = 10;
    conf.initialFileToProcess = null;
    conf.dataFormatConfig.compression = Compression.NONE;
    conf.dataFormatConfig.filePatternInArchive = "*";
    conf.errorArchiveDir = null;
    conf.postProcessing = PostProcessingOptions.NONE;
    conf.retentionTimeMins = 10;
    conf.dataFormatConfig.textMaxLineLen = 10;
    conf.dataFormatConfig.onParseError = OnParseError.ERROR;
    conf.dataFormatConfig.maxStackTraceLines = 0;
    conf.allowLateDirectory = false;
    conf.numberOfThreads = 1;

    SpoolDirSource source = new SpoolDirSource(conf);

    final int numOffset = 5;
    Map<String, String> lastSourceOffsets = new HashMap<>(numOffset);
    IntStream.range(0, numOffset).forEach(threadNumber -> lastSourceOffsets.put("file-" + threadNumber + ".log",
            Integer.toString(threadNumber)));

    Map<String, String> lastSourceOffset = new HashMap<>();
    lastSourceOffset.put(source.OFFSET_VERSION, OFFSET_VERSION_ONE);
    for (String fileName : lastSourceOffsets.keySet()) {
        Offset offset = new Offset(OFFSET_VERSION_ONE, fileName + "::" + lastSourceOffsets.get(fileName));
        lastSourceOffset.put(fileName, offset.getOffsetString());
    }

    Map<String, Offset> offsetMap = source.handleLastSourceOffset(lastSourceOffset, createTestContext());
    Assert.assertEquals(numOffset, offsetMap.size());

    for (String fileName : lastSourceOffsets.keySet()) {
        Assert.assertEquals(lastSourceOffsets.get(fileName), offsetMap.get(fileName).getOffset());
    }

    // get the very last file from the last source offset
    Assert.assertEquals("file-0.log", source.getLastSourceFileName());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_LibraryJars.java

/** Builds a V2 library bean out of a V1 share
 * @param src_json the V1 share as a JSON node
 * @return the corresponding V2 SharedLibraryBean
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws ParseException
 */
protected static SharedLibraryBean getLibraryBeanFromV1Share(final JsonNode src_json)
        throws JsonParseException, JsonMappingException, IOException, ParseException {

    final String[] description_lines = Optional.ofNullable(safeJsonGet("description", src_json).asText())
            .orElse("unknown").split("\r\n?|\n");

    final String _id = "v1_" + safeJsonGet(JsonUtils._ID, src_json).asText();
    final String created = safeJsonGet("created", src_json).asText();
    final String modified = safeJsonGet("modified", src_json).asText();
    final String display_name = safeJsonGet("title", src_json).asText();
    final String path_name = display_name;

    final List<String> description_lines_list = Arrays.asList(description_lines);

    // Find possible JSON config
    Optional<Tuple2<Integer, Integer>> json_config = IntStream.range(1, description_lines.length).boxed()
            .filter(i -> description_lines[i].trim().startsWith("{")).findFirst()
            .<Tuple2<Integer, Integer>>map(start -> {
                return IntStream.range(start + 1, description_lines.length).boxed()
                        .filter(i -> !description_lines[i].matches("^\\s*[{}\"'].*")).findFirst()
                        .<Tuple2<Integer, Integer>>map(end -> Tuples._2T(start, end))
                        .orElse(Tuples._2T(start, description_lines.length));
            });

    @SuppressWarnings("unchecked")
    final Optional<Map<String, Object>> json = json_config
            .map(t2 -> description_lines_list.stream().limit(t2._2()).skip(t2._1())
                    .collect(Collectors.joining("\n")))
            .map(Lambdas.wrap_u(s -> _mapper.readTree(s)))
            .<Map<String, Object>>map(j -> (Map<String, Object>) _mapper.convertValue(j, Map.class));

    final Set<String> tags = safeTruncate(description_lines[description_lines.length - 1], 5).toLowerCase()
            .startsWith("tags:")
                    ? new HashSet<String>(Arrays.asList(description_lines[description_lines.length - 1]
                            .replaceFirst("(?i)tags:\\s*", "").split("\\s*,\\s*")))
                    : Collections.emptySet();

    final String description = description_lines_list.stream()
            .limit(Optional.of(description_lines.length).map(n -> tags.isEmpty() ? n : n - 1) // skip over the tags if any
                    .get())
            .skip(json_config.map(Tuple2::_2).orElse(1)).collect(Collectors.joining("\n"));

    final LibraryType type = LibraryType.misc_archive;
    final String owner_id = safeJsonGet(JsonUtils._ID, safeJsonGet("owner", src_json)).asText();
    //final JsonNode comm_objs = safeJsonGet("communities", src_json); // collection of { _id: $oid } types
    final String misc_entry_point = description_lines[0];

    final SharedLibraryBean bean = BeanTemplateUtils.build(SharedLibraryBean.class)
            .with(SharedLibraryBean::_id, _id).with(SharedLibraryBean::created, parseJavaDate(created))
            .with(SharedLibraryBean::modified, parseJavaDate(modified))
            .with(SharedLibraryBean::display_name, display_name).with(SharedLibraryBean::path_name, path_name)
            .with(SharedLibraryBean::description, description).with(SharedLibraryBean::tags, tags)
            .with(SharedLibraryBean::type, type).with(SharedLibraryBean::misc_entry_point, misc_entry_point)
            .with(SharedLibraryBean::owner_id, owner_id)
            .with(SharedLibraryBean::library_config, json.orElse(null)).done().get();

    return bean;
}

From source file:io.pravega.controller.task.Stream.StreamMetadataTasks.java

@VisibleForTesting
CompletableFuture<CreateStreamStatus.Status> createStreamBody(String scope, String stream,
        StreamConfiguration config, long timestamp) {
    return this.streamMetadataStore.createStream(scope, stream, config, timestamp, null, executor)
            .thenComposeAsync(response -> {
                log.info("{}/{} created in metadata store", scope, stream);
                CreateStreamStatus.Status status = translate(response.getStatus());
                // only if its a new stream or an already existing non-active stream then we will create
                // segments and change the state of the stream to active.
                if (response.getStatus().equals(CreateStreamResponse.CreateStatus.NEW)
                        || response.getStatus().equals(CreateStreamResponse.CreateStatus.EXISTS_CREATING)) {
                    List<Integer> newSegments = IntStream
                            .range(0, response.getConfiguration().getScalingPolicy().getMinNumSegments())
                            .boxed().collect(Collectors.toList());
                    return notifyNewSegments(scope, stream, response.getConfiguration(), newSegments)
                            .thenCompose(y -> {
                                final OperationContext context = streamMetadataStore.createContext(scope,
                                        stream);

                                return withRetries(() -> {
                                    CompletableFuture<Void> future;
                                    if (config.getRetentionPolicy() != null) {
                                        future = streamMetadataStore.addUpdateStreamForAutoStreamCut(scope,
                                                stream, config.getRetentionPolicy(), context, executor);
                                    } else {
                                        future = CompletableFuture.completedFuture(null);
                                    }
                                    return future.thenCompose(v -> streamMetadataStore.setState(scope, stream,
                                            State.ACTIVE, context, executor));
                                }, executor).thenApply(z -> status);
                            });
                } else {
                    return CompletableFuture.completedFuture(status);
                }
            }, executor).handle((result, ex) -> {
                if (ex != null) {
                    Throwable cause = Exceptions.unwrap(ex);
                    if (cause instanceof StoreException.DataNotFoundException) {
                        return CreateStreamStatus.Status.SCOPE_NOT_FOUND;
                    } else {
                        log.warn("Create stream failed due to ", ex);
                        return CreateStreamStatus.Status.FAILURE;
                    }
                } else {
                    return result;
                }
            });
}

From source file:com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Maps each element of this tensor through the given function, returning a new tensor (parallel variant of map).
 *
 * @param f the function to apply to each element
 * @return the mapped tensor
 */
@Nullable
public Tensor mapParallel(@Nonnull final DoubleUnaryOperator f) {
    @Nullable
    final double[] data = getData();
    return new Tensor(Tensor.getDoubles(IntStream.range(0, length()).mapToDouble(i -> f.applyAsDouble(data[i])),
            length()), dimensions);
}

From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java

/**
 * Runs the inference for the ungrouped MAP variable given an input search algorithm.
 * @param searchAlgorithm a valid {@link SearchAlgorithm} value.
 */
public void runInferenceUngroupedMAPVariable(SearchAlgorithm searchAlgorithm) {

    if (MAPvariable == null || MAPvarName == null) {
        System.out.println("Error: The MAP variable has not been set");
        System.exit(-30);
    }

    if (this.unfoldedStaticModel == null) {
        unfoldedStaticModel = DynamicToStaticBNConverter.convertDBNtoBN(model, nTimeSteps);
    }

    if (evidence != null && staticEvidence == null) {

        staticEvidence = new HashMapAssignment(unfoldedStaticModel.getNumberOfVars());

        evidence.stream().forEach(dynamicAssignment -> {
            int time = (int) dynamicAssignment.getTimeID();
            Set<Variable> dynAssigVariables = dynamicAssignment.getVariables();
            for (Variable dynVariable : dynAssigVariables) {
                Variable staticVariable = unfoldedStaticModel.getVariables()
                        .getVariableByName(dynVariable.getName() + "_t" + Integer.toString(time));
                double varValue = dynamicAssignment.getValue(dynVariable);
                staticEvidence.setValue(staticVariable, varValue);
            }

        });
    }

    InferenceAlgorithm staticModelInference;
    switch (searchAlgorithm) {
    case VMP:
        staticModelInference = new VMP();
        //((VMP)staticModelInference).setTestELBO(true);
        ((VMP) staticModelInference).setThreshold(0.0001);
        ((VMP) staticModelInference).setMaxIter(3000);
        break;

    case IS:
    default:
        ImportanceSamplingRobust importanceSampling = new ImportanceSamplingRobust();
        importanceSampling.setSampleSize(this.sampleSize);
        Random random = new Random((this.seed));
        importanceSampling.setSeed(random.nextInt());
        staticModelInference = importanceSampling;
        break;
    }

    staticModelInference.setParallelMode(this.parallelMode);
    staticModelInference.setModel(unfoldedStaticModel);

    if (searchAlgorithm == SearchAlgorithm.IS) {
        ((ImportanceSamplingRobust) staticModelInference).setVariablesAPosteriori(unfoldedStaticModel
                .getVariables().getListOfVariables().stream()
                .filter(variable -> variable.getName().contains(MAPvarName)).collect(Collectors.toList()));
    }
    if (evidence != null) {
        staticModelInference.setEvidence(staticEvidence);
    }
    staticModelInference.runInference();

    List<UnivariateDistribution> posteriorMAPDistributionsStaticModel = new ArrayList<>();
    IntStream.range(0, nTimeSteps).forEachOrdered(i -> {
        posteriorMAPDistributionsStaticModel.add(staticModelInference.getPosterior(i));
        //System.out.println("Ungrouped Posterior " + i + staticModelInference.getPosterior(i).toString());
    });

    allUngroupedPosteriorDistributions = posteriorMAPDistributionsStaticModel;

    double[] probabilities = posteriorMAPDistributionsStaticModel.stream()
            .map(dist -> argMax(dist.getParameters())).mapToDouble(array -> array[0]).toArray();
    double MAPsequenceProbability = Math.exp(Arrays.stream(probabilities).map(prob -> Math.log(prob)).sum());

    int[] MAPsequence = posteriorMAPDistributionsStaticModel.stream().map(dist -> argMax(dist.getParameters()))
            .mapToInt(array -> (int) array[1]).toArray();

    MAPestimate = new HashMapAssignment(nTimeSteps);
    IntStream.range(0, nTimeSteps).forEach(t -> {
        Variables variables = Serialization.deepCopy(this.unfoldedStaticModel.getVariables());
        Variable currentVar;
        if (variables.getVariableByName(MAPvarName + "_t" + Integer.toString(t)) != null) {
            currentVar = variables.getVariableByName(MAPvarName + "_t" + Integer.toString(t));
        } else {
            currentVar = variables.newMultinomialVariable(MAPvarName + "_t" + Integer.toString(t),
                    MAPvariable.getNumberOfStates());
        }

        MAPestimate.setValue(currentVar, MAPsequence[t]);
    });
    MAPestimateLogProbability = Math.log(MAPsequenceProbability);
    this.MAPsequence = MAPsequence;
}

From source file:com.yahoo.bullet.storm.FilterBoltTest.java

@Test
public void testTopK() {
    // 16 records
    BulletStormConfig config = new BulletStormConfig(
            TopKTest.makeConfiguration(ErrorType.NO_FALSE_NEGATIVES, 32));
    bolt = ComponentUtils.prepare(new DonableFilterBolt(16, config), collector);

    Tuple query = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
            makeAggregationQuery(TOP_K, 5, null, "cnt", Pair.of("A", ""), Pair.of("B", "foo")), METADATA);
    bolt.execute(query);

    IntStream.range(0, 8).mapToObj(i -> RecordBox.get().add("A", i).getRecord())
            .map(FilterBoltTest::makeRecordTuple).forEach(bolt::execute);
    IntStream.range(0, 6).mapToObj(i -> RecordBox.get().add("A", 0).getRecord())
            .map(FilterBoltTest::makeRecordTuple).forEach(bolt::execute);
    IntStream.range(0, 2).mapToObj(i -> RecordBox.get().add("A", 3).getRecord())
            .map(FilterBoltTest::makeRecordTuple).forEach(bolt::execute);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    bolt.execute(tick);

    Assert.assertEquals(collector.getEmittedCount(), 1);

    byte[] rawData = getRawPayloadOfNthTuple(1);
    Assert.assertNotNull(rawData);

    Map<String, String> fields = new HashMap<>();
    fields.put("A", "");
    fields.put("B", "foo");
    TopK topK = TopKTest.makeTopK(config, makeAttributes("cnt", null), fields, 2, null);
    topK.combine(rawData);

    List<BulletRecord> records = topK.getRecords();
    Assert.assertEquals(records.size(), 2);

    BulletRecord expectedA = RecordBox.get().add("A", "0").add("foo", "null").add("cnt", 7L).getRecord();
    BulletRecord expectedB = RecordBox.get().add("A", "3").add("foo", "null").add("cnt", 3L).getRecord();

    Assert.assertEquals(records.get(0), expectedA);
    Assert.assertEquals(records.get(1), expectedB);
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestHdfsDataWriteService.java

public void test_writerService_end2end(Optional<String> secondary, boolean is_transient)
        throws InterruptedException, ExecutionException {
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;
    HfdsDataWriteService<TestBean> write_service = getWriter(
            "/test/writer/end2end/" + secondary.orElse("current") + "/", secondary, is_transient);

    // (Tidy up)
    try {
        FileUtils.deleteDirectory(new File(temp_dir + "/data/" + write_service._bucket.full_name()));
    } catch (Exception e) {
    }

    // Check lazy initialization only kicks in once      
    Optional<IBatchSubservice<TestBean>> x = write_service.getBatchWriteSubservice();
    assertEquals(x.get(), write_service._writer.get());
    Optional<IBatchSubservice<TestBean>> y = write_service.getBatchWriteSubservice();
    assertEquals(x.get(), y.get());

    IBatchSubservice<TestBean> batch = x.get();

    // Set up properties for testing:
    batch.setBatchProperties(Optional.of(1000), Optional.of(1000L), Optional.of(Duration.ofSeconds(2L)),
            Optional.of(3));

    Thread.sleep(1000L);
    // Check there are now 3 threads
    assertEquals(3, write_service._writer.get()._state._workers.getActiveCount());

    for (int i = 0; i < 20; ++i) {
        TestBean emit = new TestBean("id" + i, "val" + i);
        if (0 == (i % 2)) {
            if (0 == ((i / 2) % 2)) {
                batch.storeObject(emit);
            } else {
                CompletableFuture<Supplier<Object>> cf = write_service.storeObject(emit);
                assertEquals(null, cf.get().get());
            }
        } else {
            if (0 == ((i / 2) % 2)) {
                batch.storeObjects(Arrays.asList(emit));
            } else {
                CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> cf = write_service
                        .storeObjects(Arrays.asList(emit));
                assertEquals(Collections.emptyList(), cf.get()._1().get());
                assertEquals(1L, cf.get()._2().get().longValue());
            }
        }
    }
    final String infix = is_transient ? IStorageService.TRANSIENT_DATA_SUFFIX_SECONDARY
            : IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY;
    final String infix_name = is_transient ? "testj-testm" : "";

    // Check that initially the files are stored locally
    File init_dir = new File((temp_dir + "/data/" + write_service._bucket.full_name() + infix
            + secondary.orElse("current") + "/" + infix_name + "/.spooldir/").replace("/", File.separator));
    File final_dir = new File((temp_dir + "/data/" + write_service._bucket.full_name() + infix
            + secondary.orElse("current") + "/" + infix_name + "/all_time/").replace("/", File.separator));

    {
        int ii = 1;
        for (; ii <= 50; ++ii) {
            Thread.sleep(250L);
            if (6 == init_dir.list().length) {
                break;
            }
        }
        System.out.println("(exited from file system check after " + ii * 2.5 + " s)");
    }

    assertEquals("Needs to have 6 files, including 3x .crc: " + Arrays.toString(init_dir.list()), 6,
            init_dir.list().length); //*2 because CRC
    assertTrue(
            "Nothing in final dir: " + (final_dir.exists() ? Arrays.toString(final_dir.list()) : "(non-exist)"),
            !final_dir.exists() || final_dir.list().length == 0);

    {
        int ii = 1;
        for (; ii <= 50; ++ii) {
            Thread.sleep(2500L);
            if (0 == init_dir.list().length) {
                break;
            }
        }
        System.out.println("(exited from file system check after " + ii * 2.5 + " s)");
    }

    assertEquals(0, init_dir.list().length); //*2 because CRC
    assertEquals(6, final_dir.list().length); //*2 because CRC      

    // Change batch properties so that will segment (also check number of threads reduces)
    batch.setBatchProperties(Optional.of(10), Optional.of(1000L), Optional.of(Duration.ofSeconds(5L)),
            Optional.of(1));
    List<TestBean> l1 = IntStream.range(0, 8).boxed().map(i -> new TestBean("id" + i, "val" + i))
            .collect(Collectors.toList());
    List<TestBean> l2 = IntStream.range(8, 15).boxed().map(i -> new TestBean("id" + i, "val" + i))
            .collect(Collectors.toList());

    batch.storeObjects(l1);
    Thread.sleep(750L);
    assertEquals(6, final_dir.list().length); //*2 because CRC      
    System.out.println("Found: 6 files: " + Arrays.stream(final_dir.list()).collect(Collectors.joining(";")));

    batch.storeObjects(l2);
    System.out.println("Added 7 more objects at " + new Date());
    for (int jj = 0; jj < 5; ++jj) {
        Thread.sleep(1500L);
        if (final_dir.list().length > 6)
            break;
    }
    System.out.println("(Check init dir cleared: "
            + Arrays.stream(init_dir.list()).collect(Collectors.joining(";")) + ")");
    assertEquals("Should have 8 files: " + Arrays.stream(final_dir.list()).collect(Collectors.joining(";")), 8,
            final_dir.list().length); //*2 because CRC   

    System.out.println("(Deleting datastore and checking it's empty)");
    assertTrue("Deleted datastore: ", write_service.deleteDatastore().get()); // (just quick test since this uses handleBucketDeletion which is tested elsewhere...)
    String[] final_dir_list = Optional.ofNullable(final_dir.list()).orElse(new String[0]);
    assertEquals("Should have 0 files: " + Arrays.stream(final_dir_list).collect(Collectors.joining(";")), 0,
            final_dir_list.length); //*2 because CRC   
}