Example usage for java.util.Arrays.stream

Introduction

On this page you can find usage examples for java.util.Arrays.stream.

Prototype

public static DoubleStream stream(double[] array) 

Document

Returns a sequential DoubleStream with the specified array as its source. Note that Arrays.stream is overloaded: variants also exist for int[], long[], and object arrays (returning IntStream, LongStream, and Stream&lt;T&gt; respectively), and most of the examples below use the object-array overload.
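
As a quick, self-contained sketch of the double[] overload shown in the prototype (not taken from the projects indexed below; the sample values are invented), the following computes simple statistics over an array:

import java.util.Arrays;
import java.util.DoubleSummaryStatistics;

public class ArraysStreamDemo {
    public static void main(String[] args) {
        double[] samples = { 1.5, 2.0, 3.25, 4.0 }; // invented sample data

        // Arrays.stream(double[]) returns a sequential DoubleStream.
        double sum = Arrays.stream(samples).sum();

        // DoubleStream offers numeric conveniences such as summaryStatistics().
        DoubleSummaryStatistics stats = Arrays.stream(samples).summaryStatistics();

        System.out.printf("sum=%.2f avg=%.2f max=%.2f%n", sum, stats.getAverage(), stats.getMax());
    }
}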

Usage

From source file:se.uu.it.cs.recsys.dataloader.correction.CourseSelectionRecourseCorrecter.java
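
This example streams the File[] returned by listFiles() and applies a course-name correction to each record file via forEach.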

public static void main(String[] args) throws IOException {
    final String COURSE_SEL_REC_FILES_DIR = "file:C:\\Dev\\yong\\CourseRecommenderParent\\CourseRecommenderDataLoader\\src\\main\\resources\\data_source\\course_selection_records";

    Resource courseSelectionDir = new FileSystemResource(COURSE_SEL_REC_FILES_DIR);

    File[] recordFiles = courseSelectionDir.getFile().listFiles();

    Arrays.stream(recordFiles).forEach(file -> {
        try {
            correctCourseName(file, CourseNameCorrectionGenerator.getWrongToCorrectNamePairs());
        } catch (IOException ex) {
            LOGGER.error("Failed to correct names in file {}", file.getName(), ex);
        }
    });
}

From source file:io.zz.TestSaveToCassandra.java
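
This example splits the contents of a SQL script on ';', streams the resulting String[], trims each fragment, drops empty ones, restores the terminating semicolon, and executes each statement against a Cassandra session.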

public static void main(String[] args) throws IOException, InterruptedException {
    SparkConf conf = new SparkConf().setAppName("aaaa").setMaster("spark://192.168.100.105:7077")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.cassandra.connection.host", "192.168.100.105");

    JavaStreamingContext streamingContext = new JavaStreamingContext(conf, new Duration(10000));
    JavaDStream<String> stream = streamingContext.socketTextStream("192.168.100.105", 9999);

    //        stream.count().print();
    SQLContext sql = SQLContext.getOrCreate(streamingContext.sparkContext().sc());

    Dataset<Name> ds = sql.createDataset(ImmutableList.of(new Name("a", "b")), Encoders.bean(Name.class));

    CassandraConnector cc = CassandraConnector.apply(conf);
    try (Session session = cc.openSession()) {
        String file = IOUtils.toString(TestSaveToCassandra.class.getResourceAsStream("/c.sql"));
        Arrays.stream(file.split(";")).map(s -> s.trim()).filter(s -> !s.isEmpty()).map(s -> s + ";")
                .forEach((String str) -> session.execute(str));
    }

    //        ds.toDF().write().mode(SaveMode.Overwrite).option("truncate", "true").jdbc("", "", new Properties());
    JavaDStream<Name> map = stream.map(s -> new Name(s, "e"));

    map.foreachRDD((s, t) -> process(s));

    CassandraStreamingJavaUtil.javaFunctions(map)
            .writerBuilder("keyspace1", "name", CassandraJavaUtil.mapToRow(Name.class)).saveToCassandra();

    streamingContext.start();
    streamingContext.awaitTermination();
    streamingContext.stop();
}

From source file:org.rapidpm.microservice.optionals.service.ServiceWrapper.java
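
This example uses Arrays.stream(args).anyMatch(...) to check whether the shutdown flag was passed on the command line.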

public static void main(String[] args) {
    boolean shutdown = Arrays.stream(args).anyMatch(s -> s.equals(SHUTDOWN));
    if (!shutdown) {
        startMicroservice(args);
    } else {
        shutdownMicroservice();
    }
}

From source file:async.nio2.Main.java
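
Here Arrays.stream reduces each client's array of timing samples into a DescriptiveStatistics accumulator before printing a per-client summary.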

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {

    if (args.length == 3) {
        PORT = Integer.valueOf(args[0]);
        NO_CLIENTS = Integer.valueOf(args[1]);
        NO_SAMPLES = Integer.valueOf(args[2]);
    }

    if (PORT < 0) {
        System.err.println("Error: port < 0");
        System.exit(1);
    }

    if (NO_CLIENTS < 1) {
        System.err.println("Error: #clients < 1");
        System.exit(1);
    }

    if (NO_SAMPLES < 1) {
        System.err.println("Error: #samples < 1");
        System.exit(1);
    }

    AsynchronousChannelGroup groupServer = AsynchronousChannelGroup
            .withThreadPool(Executors.newFixedThreadPool(1));
    AsynchronousChannelGroup groupClient = AsynchronousChannelGroup
            .withThreadPool(Executors.newFixedThreadPool(1));

    Server server = Server.newInstance(new InetSocketAddress("localhost", PORT), groupServer);
    InetSocketAddress localAddress = server.getLocalAddress();
    String hostname = localAddress.getHostName();
    int port = localAddress.getPort();

    ExecutorService es = Executors.newFixedThreadPool(2);

    System.out.printf("%03d clients on %s:%d, %03d runs each. All times in s.%n", NO_CLIENTS, hostname, port,
            NO_SAMPLES);
    range(0, NO_CLIENTS).unordered().parallel()
            .mapToObj(i -> CompletableFuture.supplyAsync(newClient(localAddress, groupClient), es).join())
            .map(array -> Arrays.stream(array).reduce(new DescriptiveStatistics(), Main::accumulate,
                    Main::combine))
            .map(Main::toEvaluationString).forEach(System.out::println);

    es.shutdown();
    es.awaitTermination(5, TimeUnit.SECONDS);

    groupClient.shutdown();
    groupClient.awaitTermination(5, TimeUnit.SECONDS);

    server.close();
    groupServer.shutdown();
    groupServer.awaitTermination(5, TimeUnit.SECONDS);
}

From source file:com.cloudera.oryx.app.traffic.TrafficUtil.java
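
This example maps an array of host strings to URI instances and collects them into a List.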

public static void main(String[] args) throws Exception {
    if (args.length < 3) {
        System.err.println("usage: TrafficUtil [hosts] [requestIntervalMS] [threads] [... other args]");
        return;
    }

    String[] hostStrings = COMMA.split(args[0]);
    Preconditions.checkArgument(hostStrings.length >= 1);
    int requestIntervalMS = Integer.parseInt(args[1]);
    Preconditions.checkArgument(requestIntervalMS >= 0);
    int numThreads = Integer.parseInt(args[2]);
    Preconditions.checkArgument(numThreads >= 1);

    String[] otherArgs = new String[args.length - 3];
    System.arraycopy(args, 3, otherArgs, 0, otherArgs.length);

    List<URI> hosts = Arrays.stream(hostStrings).map(URI::create).collect(Collectors.toList());

    int perClientRequestIntervalMS = numThreads * requestIntervalMS;

    Endpoints alsEndpoints = new Endpoints(ALSEndpoint.buildALSEndpoints());
    AtomicLong requestCount = new AtomicLong();
    AtomicLong serverErrorCount = new AtomicLong();
    AtomicLong clientErrorCount = new AtomicLong();
    AtomicLong exceptionCount = new AtomicLong();

    long start = System.currentTimeMillis();
    ExecUtils.doInParallel(numThreads, numThreads, true, i -> {
        RandomGenerator random = RandomManager.getRandom(Integer.toString(i).hashCode() ^ System.nanoTime());
        ExponentialDistribution msBetweenRequests;
        if (perClientRequestIntervalMS > 0) {
            msBetweenRequests = new ExponentialDistribution(random, perClientRequestIntervalMS);
        } else {
            msBetweenRequests = null;
        }

        ClientConfig clientConfig = new ClientConfig();
        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
        connectionManager.setMaxTotal(numThreads);
        connectionManager.setDefaultMaxPerRoute(numThreads);
        clientConfig.property(ApacheClientProperties.CONNECTION_MANAGER, connectionManager);
        clientConfig.connectorProvider(new ApacheConnectorProvider());
        Client client = ClientBuilder.newClient(clientConfig);

        try {
            while (true) {
                try {
                    WebTarget target = client.target("http://" + hosts.get(random.nextInt(hosts.size())));
                    Endpoint endpoint = alsEndpoints.chooseEndpoint(random);
                    Invocation invocation = endpoint.makeInvocation(target, otherArgs, random);

                    long startTime = System.currentTimeMillis();
                    Response response = invocation.invoke();
                    try {
                        response.readEntity(String.class);
                    } finally {
                        response.close();
                    }
                    long elapsedMS = System.currentTimeMillis() - startTime;

                    int statusCode = response.getStatusInfo().getStatusCode();
                    if (statusCode >= 400) {
                        if (statusCode >= 500) {
                            serverErrorCount.incrementAndGet();
                        } else {
                            clientErrorCount.incrementAndGet();
                        }
                    }

                    endpoint.recordTiming(elapsedMS);

                    if (requestCount.incrementAndGet() % 10000 == 0) {
                        long elapsed = System.currentTimeMillis() - start;
                        log.info("{}ms:\t{} requests\t({} client errors\t{} server errors\t{} exceptions)",
                                elapsed, requestCount.get(), clientErrorCount.get(), serverErrorCount.get(),
                                exceptionCount.get());
                        for (Endpoint e : alsEndpoints.getEndpoints()) {
                            log.info("{}", e);
                        }
                    }

                    if (msBetweenRequests != null) {
                        int desiredElapsedMS = (int) Math.round(msBetweenRequests.sample());
                        if (elapsedMS < desiredElapsedMS) {
                            Thread.sleep(desiredElapsedMS - elapsedMS);
                        }
                    }
                } catch (Exception e) {
                    exceptionCount.incrementAndGet();
                    log.warn("{}", e.getMessage());
                }
            }
        } finally {
            client.close();
        }
    });
}

From source file:examples.cnn.ImagesClassification.java
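
Here Arrays.stream processes the String[] produced by splitting a row of space-separated pixel values, normalizing each token into a double[] feature vector.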

public static void main(String[] args) {

    SparkConf conf = new SparkConf();
    conf.setAppName("Images CNN Classification");
    conf.setMaster(String.format("local[%d]", NUM_CORES));
    conf.set(SparkDl4jMultiLayer.AVERAGE_EACH_ITERATION, String.valueOf(true));

    try (JavaSparkContext sc = new JavaSparkContext(conf)) {

        JavaRDD<String> raw = sc.textFile("data/images-data-rgb.csv");
        String first = raw.first();

        JavaPairRDD<String, String> labelData = raw.filter(f -> !f.equals(first)).mapToPair(r -> {
            String[] tab = r.split(";");
            return new Tuple2<>(tab[0], tab[1]);
        });

        Map<String, Long> labels = labelData.map(t -> t._1).distinct().zipWithIndex()
                .mapToPair(t -> new Tuple2<>(t._1, t._2)).collectAsMap();

        log.info("Number of labels {}", labels.size());
        labels.forEach((a, b) -> log.info("{}: {}", a, b));

        NetworkTrainer trainer = new NetworkTrainer.Builder().model(ModelLibrary.net1)
                .networkToSparkNetwork(net -> new SparkDl4jMultiLayer(sc, net)).numLabels(labels.size())
                .cores(NUM_CORES).build();

        JavaRDD<Tuple2<INDArray, double[]>> labelsWithData = labelData.map(t -> {
            INDArray label = FeatureUtil.toOutcomeVector(labels.get(t._1).intValue(), labels.size());
            double[] arr = Arrays.stream(t._2.split(" ")).map(normalize1).mapToDouble(Double::doubleValue)
                    .toArray();
            return new Tuple2<>(label, arr);
        });

        JavaRDD<Tuple2<INDArray, double[]>>[] splited = labelsWithData.randomSplit(new double[] { .8, .2 },
                seed);

        JavaRDD<DataSet> testDataset = splited[1].map(t -> {
            INDArray features = Nd4j.create(t._2, new int[] { 1, t._2.length });
            return new DataSet(features, t._1);
        }).cache();
        log.info("Number of test images {}", testDataset.count());

        JavaRDD<DataSet> plain = splited[0].map(t -> {
            INDArray features = Nd4j.create(t._2, new int[] { 1, t._2.length });
            return new DataSet(features, t._1);
        });

        /*
         * JavaRDD<DataSet> flipped = splited[0].randomSplit(new double[] { .5, .5 }, seed)[0].
         */
        JavaRDD<DataSet> flipped = splited[0].map(t -> {
            double[] arr = t._2;
            int idx = 0;
            double[] farr = new double[arr.length];
            for (int i = 0; i < arr.length; i += trainer.width) {
                double[] temp = Arrays.copyOfRange(arr, i, i + trainer.width);
                ArrayUtils.reverse(temp);
                for (int j = 0; j < trainer.height; ++j) {
                    farr[idx++] = temp[j];
                }
            }
            INDArray features = Nd4j.create(farr, new int[] { 1, farr.length });
            return new DataSet(features, t._1);
        });

        JavaRDD<DataSet> trainDataset = plain.union(flipped).cache();
        log.info("Number of train images {}", trainDataset.count());

        trainer.train(trainDataset, testDataset);
    }
}

From source file:examples.cnn.cifar.Cifar10Classification.java
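
This example uses the double[] overload directly: the raw image data (with the leading label element removed) is streamed as a DoubleStream, boxed, normalized, and converted back to a double[].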

public static void main(String[] args) {

    CifarReader.downloadAndExtract();

    int numLabels = 10;

    SparkConf conf = new SparkConf();
    conf.setMaster(String.format("local[%d]", NUM_CORES));
    conf.setAppName("Cifar-10 CNN Classification");
    conf.set(SparkDl4jMultiLayer.AVERAGE_EACH_ITERATION, String.valueOf(true));

    try (JavaSparkContext sc = new JavaSparkContext(conf)) {

        NetworkTrainer trainer = new NetworkTrainer.Builder().model(ModelLibrary.net2)
                .networkToSparkNetwork(net -> new SparkDl4jMultiLayer(sc, net)).numLabels(numLabels)
                .cores(NUM_CORES).build();

        JavaPairRDD<String, PortableDataStream> files = sc.binaryFiles("data/cifar-10-batches-bin");

        JavaRDD<double[]> imagesTrain = files
                .filter(f -> ArrayUtils.contains(CifarReader.TRAIN_DATA_FILES, extractFileName.apply(f._1)))
                .flatMap(f -> CifarReader.rawDouble(f._2.open()));

        JavaRDD<double[]> imagesTest = files
                .filter(f -> CifarReader.TEST_DATA_FILE.equals(extractFileName.apply(f._1)))
                .flatMap(f -> CifarReader.rawDouble(f._2.open()));

        JavaRDD<DataSet> testDataset = imagesTest.map(i -> {
            INDArray label = FeatureUtil.toOutcomeVector(Double.valueOf(i[0]).intValue(), numLabels);
            double[] arr = Arrays.stream(ArrayUtils.remove(i, 0)).boxed().map(normalize2)
                    .mapToDouble(Double::doubleValue).toArray();
            INDArray features = Nd4j.create(arr, new int[] { 1, arr.length });
            return new DataSet(features, label);
        }).cache();
        log.info("Number of test images {}", testDataset.count());

        JavaPairRDD<INDArray, double[]> labelsWithDataTrain = imagesTrain.mapToPair(i -> {
            INDArray label = FeatureUtil.toOutcomeVector(Double.valueOf(i[0]).intValue(), numLabels);
            double[] arr = Arrays.stream(ArrayUtils.remove(i, 0)).boxed().map(normalize2)
                    .mapToDouble(Double::doubleValue).toArray();
            return new Tuple2<>(label, arr);
        });

        JavaRDD<DataSet> flipped = labelsWithDataTrain.map(t -> {
            double[] arr = t._2;
            int idx = 0;
            double[] farr = new double[arr.length];
            for (int i = 0; i < arr.length; i += trainer.getWidth()) {
                double[] temp = Arrays.copyOfRange(arr, i, i + trainer.getWidth());
                ArrayUtils.reverse(temp);
                for (int j = 0; j < trainer.getHeight(); ++j) {
                    farr[idx++] = temp[j];
                }
            }
            INDArray features = Nd4j.create(farr, new int[] { 1, farr.length });
            return new DataSet(features, t._1);
        });

        JavaRDD<DataSet> trainDataset = labelsWithDataTrain.map(t -> {
            INDArray features = Nd4j.create(t._2, new int[] { 1, t._2.length });
            return new DataSet(features, t._1);
        }).union(flipped).cache();
        log.info("Number of train images {}", trainDataset.count());

        trainer.train(trainDataset, testDataset);
    }
}

From source file:com.hortonworks.registries.storage.tool.sql.DatabaseUserInitializer.java
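
This example streams the array of required option names and uses anyMatch to detect whether any of them is missing from the parsed command line.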

public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(Option.builder("c").numberOfArgs(1).longOpt(OPTION_CONFIG_FILE_PATH)
            .desc("Config file path").build());

    options.addOption(Option.builder("m").numberOfArgs(1).longOpt(OPTION_MYSQL_JAR_URL_PATH)
            .desc("Mysql client jar url to download").build());

    options.addOption(Option.builder().hasArg().longOpt(OPTION_ADMIN_JDBC_URL)
            .desc("JDBC url to connect DBMS via admin.").build());

    options.addOption(Option.builder().hasArg().longOpt(OPTION_ADMIN_DB_USER)
            .desc("Admin user name: should be able to create and grant privileges.").build());

    options.addOption(Option.builder().hasArg().longOpt(OPTION_ADMIN_PASSWORD)
            .desc("Admin user's password: should be able to create and grant privileges.").build());

    options.addOption(
            Option.builder().hasArg().longOpt(OPTION_TARGET_USER).desc("Name of target user.").build());

    options.addOption(
            Option.builder().hasArg().longOpt(OPTION_TARGET_PASSWORD).desc("Password of target user.").build());

    options.addOption(
            Option.builder().hasArg().longOpt(OPTION_TARGET_DATABASE).desc("Target database.").build());

    CommandLineParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(options, args);

    String[] neededOptions = { OPTION_CONFIG_FILE_PATH, OPTION_MYSQL_JAR_URL_PATH, OPTION_ADMIN_JDBC_URL,
            OPTION_ADMIN_DB_USER, OPTION_ADMIN_PASSWORD, OPTION_TARGET_USER, OPTION_TARGET_PASSWORD,
            OPTION_TARGET_DATABASE };

    boolean optNotFound = Arrays.stream(neededOptions).anyMatch(opt -> !commandLine.hasOption(opt));
    if (optNotFound) {
        usage(options);
        System.exit(1);
    }

    String confFilePath = commandLine.getOptionValue(OPTION_CONFIG_FILE_PATH);
    String mysqlJarUrl = commandLine.getOptionValue(OPTION_MYSQL_JAR_URL_PATH);

    Optional<AdminOptions> adminOptionsOptional = AdminOptions.from(commandLine);
    if (!adminOptionsOptional.isPresent()) {
        usage(options);
        System.exit(1);
    }

    AdminOptions adminOptions = adminOptionsOptional.get();

    Optional<TargetOptions> targetOptionsOptional = TargetOptions.from(commandLine);
    if (!targetOptionsOptional.isPresent()) {
        usage(options);
        System.exit(1);
    }

    TargetOptions targetOptions = targetOptionsOptional.get();

    DatabaseType databaseType = findDatabaseType(adminOptions.getJdbcUrl());

    Map<String, Object> conf;
    try {
        conf = Utils.readConfig(confFilePath);
    } catch (IOException e) {
        System.err.println("Error occurred while reading config file: " + confFilePath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    String bootstrapDirPath = null;
    try {
        bootstrapDirPath = System.getProperty("bootstrap.dir");
        Proxy proxy = Proxy.NO_PROXY;
        String httpProxyUrl = (String) conf.get(HTTP_PROXY_URL);
        String httpProxyUsername = (String) conf.get(HTTP_PROXY_USERNAME);
        String httpProxyPassword = (String) conf.get(HTTP_PROXY_PASSWORD);
        if ((httpProxyUrl != null) && !httpProxyUrl.isEmpty()) {
            URL url = new URL(httpProxyUrl);
            proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(url.getHost(), url.getPort()));
            if ((httpProxyUsername != null) && !httpProxyUsername.isEmpty()) {
                Authenticator.setDefault(getBasicAuthenticator(url.getHost(), url.getPort(), httpProxyUsername,
                        httpProxyPassword));
            }
        }

        StorageProviderConfiguration storageProperties = StorageProviderConfiguration.get(
                adminOptions.getJdbcUrl(), adminOptions.getUsername(), adminOptions.getPassword(),
                adminOptions.getDatabaseType());

        MySqlDriverHelper.downloadMySQLJarIfNeeded(storageProperties, bootstrapDirPath, mysqlJarUrl, proxy);
    } catch (Exception e) {
        System.err.println("Error occurred while downloading MySQL jar. bootstrap dir: " + bootstrapDirPath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    try (Connection conn = getConnectionViaAdmin(adminOptions)) {
        DatabaseCreator databaseCreator = DatabaseCreatorFactory.newInstance(adminOptions.getDatabaseType(),
                conn);
        UserCreator userCreator = UserCreatorFactory.newInstance(adminOptions.getDatabaseType(), conn);

        String database = targetOptions.getDatabase();
        String username = targetOptions.getUsername();

        createDatabase(databaseCreator, database);
        createUser(targetOptions, userCreator, username);
        grantPrivileges(databaseCreator, database, username);
    }
}

From source file:edu.vassar.cs.cmpu331.tvi.Main.java
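
This example streams the remaining file-name arguments and runs the interpreter on each one via a method reference.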

public static void main(String[] args) {
    Options options = new Options().addOption("t", "trace", false, "Enable tracing.")
            .addOption("d", "debug", false, "Enable debug output.")
            .addOption("s", "size", true, "Sets amount of memory available.")
            .addOption("v", "version", false, "Prints the TVI version number.")
            .addOption("r", "renumber", true, "Renumbers the lines in a files.")
            .addOption("h", "help", false, "Prints this help message");
    CommandLineParser parser = new DefaultParser();
    try {
        CommandLine opts = parser.parse(options, args);
        if (opts.hasOption('h')) {
            help(options);
            return;
        }
        if (opts.hasOption('v')) {
            System.out.println();
            System.out.println("The Vassar Interpreter v" + Version.getVersion());
            System.out.println(COPYRIGHT);
            System.out.println();
            return;
        }
        if (opts.hasOption('r')) {
            int returnCode = 0;
            try {
                renumber(opts.getOptionValue('r'));
            } catch (IOException e) {
                e.printStackTrace();
                returnCode = 1;
            }
            System.exit(returnCode);
        }
        files = opts.getArgs();
        if (files.length == 0) {
            System.out.println("ERROR: No file names given.");
            help(options);
            System.exit(1);
        }
        if (opts.hasOption('s')) {
            try {
                memory = Integer.parseInt(opts.getOptionValue('s'));
            } catch (NumberFormatException e) {
                System.out.println("ERROR: Invalid --size parameter.");
                help(options);
                System.exit(1);
            }
        }
        if (opts.hasOption('t')) {
            tracing = true;
        }
        if (opts.hasOption('d')) {
            debugging = true;
        }
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        help(options);
        System.exit(1);
    }
    Main app = new Main();
    Arrays.stream(files).forEach(app::run);
}

From source file:act.installer.reachablesexplorer.WikiWebServicesExporter.java
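
This example streams the values of the export option, converts each to a Long id, and collects them into the list used to filter the reachables query.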

public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(WikiWebServicesExporter.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    String host = cl.getOptionValue(OPTION_INPUT_DB_HOST, DEFAULT_HOST);
    Integer port = Integer.parseInt(cl.getOptionValue(OPTION_INPUT_DB_PORT, DEFAULT_PORT));
    String dbName = cl.getOptionValue(OPTION_INPUT_DB, DEFAULT_DB);
    String collection = cl.getOptionValue(OPTION_INPUT_DB_COLLECTION, DEFAULT_COLLECTION);
    String sequenceCollection = cl.getOptionValue(OPTION_INPUT_SEQUENCE_COLLECTION,
            DEFAULT_SEQUENCES_COLLECTION);

    LOGGER.info("Attempting to connect to DB %s:%d/%s, collection %s", host, port, dbName, collection);
    Loader loader = new Loader(host, port, UNUSED_SOURCE_DB, dbName, collection, sequenceCollection,
            DEFAULT_RENDERING_CACHE);

    JacksonDBCollection<Reachable, String> reachables = loader.getJacksonReachablesCollection();

    LOGGER.info("Connected to DB, reading reachables");

    List<Long> exportIds = !cl.hasOption(OPTION_EXPORT_SOME) ? Collections.emptyList()
            : Arrays.stream(cl.getOptionValues(OPTION_EXPORT_SOME)).map(Long::valueOf)
                    .collect(Collectors.toList());

    TSVWriter<String, String> tsvWriter = new TSVWriter<>(HEADER);
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));
    try {
        DBCursor<Reachable> cursor = exportIds.isEmpty() ? reachables.find()
                : reachables.find(DBQuery.in("_id", exportIds));
        int written = 0;
        while (cursor.hasNext()) {
            final Reachable r = cursor.next();

            Map<String, String> row = new HashMap<String, String>() {
                {
                    put("inchi", r.getInchi());
                    put("inchi_key", r.getInchiKey());
                    put("display_name", r.getPageName());
                    put("image_name", r.getStructureFilename());
                }
            };
            tsvWriter.append(row);
            tsvWriter.flush();
            written++;
        }
        LOGGER.info("Wrote %d reachables to output TSV", written);
    } finally {
        tsvWriter.close();
    }
}