Example usage for java.util.stream Collectors.joining

Introduction

This page lists usage examples for java.util.stream Collectors.joining.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
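
A minimal, self-contained sketch (the class name and the list of strings here are hypothetical, not drawn from the examples below):

import java.util.Arrays;
import java.util.stream.Collectors;

public class JoiningDemo {
    public static void main(String[] args) {
        // Concatenate the elements, separated by ", ", in encounter order
        String joined = Arrays.asList("a", "b", "c").stream()
                .collect(Collectors.joining(", "));
        System.out.println(joined); // prints: a, b, c
    }
}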

Usage

From source file:Main.java

public static void main(String[] args) {
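    // Group employee names by gender, then by birth month, joining names with ", "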
    Map<Gender, Map<Object, String>> personsByGenderAndDobMonth = Employee.persons().stream().collect(
            Collectors.groupingBy(Employee::getGender, Collectors.groupingBy(p -> p.getDob().getMonth(),
                    Collectors.mapping(Employee::getName, Collectors.joining(", ")))));

    System.out.println(personsByGenderAndDobMonth);
}

From source file:Main.java

public static void main(String[] args) {
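    // Build a month-to-names calendar: group names by birth month, then fill
    // missing months with "None" and wrap the result in an unmodifiable sorted map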
    Map<Month, String> dobCalendar = Employee.persons().stream()
            .collect(Collectors.collectingAndThen(Collectors.groupingBy(p -> p.getDob().getMonth(),
                    Collectors.mapping(Employee::getName, Collectors.joining(" "))), result -> {
                        for (Month m : Month.values()) {
                            result.putIfAbsent(m, "None");
                        }
                        return Collections.unmodifiableMap(new TreeMap<>(result));
                    }));

    dobCalendar.entrySet().forEach(System.out::println);
}

From source file:Main.java

public static void main(String[] args) {
    String names = Employee.persons() // The data source
            .stream() // Produces a sequential stream
            .filter(Employee::isMale) // Processed in serial
            .parallel() // Produces a parallel stream
            .map(Employee::getName) // Processed in parallel
            .collect(Collectors.joining(", ")); // Processed in parallel
    System.out.println(names);
}

From source file:Main.java

public static void main(String... args) {
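    // Fork the same stream into four concurrent computations; "shortMenu"
    // joins the food names into a single comma-separated string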
    Stream<Food> menuStream = Food.menu.stream();

    StreamForker.Results results = new StreamForker<Food>(menuStream)
                    .fork("shortMenu", s -> s.map(
                            Food::getName).collect(
                                    Collectors.joining(", ")))
                    .fork("totalCalories", s -> s.mapToInt(Food::getCalories).sum())
                    .fork("mostCaloricFood",
                            s -> s.collect(Collectors
                                    .reducing((d1, d2) -> d1.getCalories() > d2.getCalories() ? d1 : d2)).get())
                    .fork("dishesByType", s -> s.collect(Collectors.groupingBy(Food::getType))).getResults();

    String shortMenu = results.get("shortMenu");
    int totalCalories = results.get("totalCalories");
    Food mostCaloricFood = results.get("mostCaloricFood");
    Map<Food.Type, List<Food>> dishesByType = results.get("dishesByType");

    System.out.println("Short menu: " + shortMeny);
    System.out.println("Total calories: " + totalCalories);
    System.out.println("Most caloric dish: " + mostCaloricFood);
    System.out.println("Foodes by type: " + dishesByType);

}

From source file:com.google.demo.translate.Translator.java

public static void main(String[] args) {
    parseInputs();

    try {
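        // Build the CSV header: the source language plus the comma-joined target languages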
        String headers = String.join(",", source,
                targets.stream().map(i -> i.toString()).collect(Collectors.joining(",")));

        Files.write(output, Arrays.asList(headers), UTF_8, APPEND, CREATE);

        List<String> texts = new ArrayList<>();
        while (it.hasNext()) {
            texts.add(preTranslationParser(it.next()));
            if (texts.size() == 10 || !it.hasNext()) {
                translate(texts);
                texts = new ArrayList<>();
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        LineIterator.closeQuietly(it);
    }
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientFactory.java

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    CloseableHttpAsyncClient asyncClient = HttpAsyncClientBuilder.create().build();
    asyncClient.start();
    HttpGet request = new HttpGet("http://www.google.com");
    request.addHeader("Accept", "text/html");
    Future<HttpResponse> f = asyncClient.execute(request, new FutureCallback<HttpResponse>() {
        @Override
        public void completed(HttpResponse t) {
            System.out.println("completed");

            try {
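                // Read the entire response body by joining its lines with newlines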
                String result = new BufferedReader(new InputStreamReader(t.getEntity().getContent())).lines()
                        .collect(Collectors.joining("\n"));
                System.out.println(result);
            } catch (IOException ex) {
                Logger.getLogger(JPDIClientFactory.class.getName()).log(Level.SEVERE, null, ex);
            } catch (UnsupportedOperationException ex) {
                Logger.getLogger(JPDIClientFactory.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        @Override
        public void failed(Exception excptn) {
            System.out.println("failed");
        }

        @Override
        public void cancelled() {
            System.out.println("cancelled");
        }
    });
    f.get();
    asyncClient.close();
}

From source file:de.jackwhite20.japs.server.Main.java

public static void main(String[] args) throws Exception {

    Config config = null;

    if (args.length > 0) {
        Options options = new Options();
        options.addOption("h", true, "Address to bind to");
        options.addOption("p", true, "Port to bind to");
        options.addOption("b", true, "The backlog");
        options.addOption("t", true, "Worker thread count");
        options.addOption("d", false, "If debug is enabled or not");
        options.addOption("c", true, "Add server as a cluster");
        options.addOption("ci", true, "Sets the cache check interval");
        options.addOption("si", true, "Sets the snapshot interval");

        CommandLineParser commandLineParser = new BasicParser();
        CommandLine commandLine = commandLineParser.parse(options, args);

        if (commandLine.hasOption("h") && commandLine.hasOption("p") && commandLine.hasOption("b")
                && commandLine.hasOption("t")) {

            List<ClusterServer> clusterServers = new ArrayList<>();

            if (commandLine.hasOption("c")) {
                for (String c : commandLine.getOptionValues("c")) {
                    String[] splitted = c.split(":");
                    clusterServers.add(new ClusterServer(splitted[0], Integer.parseInt(splitted[1])));
                }
            }

            config = new Config(commandLine.getOptionValue("h"),
                    Integer.parseInt(commandLine.getOptionValue("p")),
                    Integer.parseInt(commandLine.getOptionValue("b")), commandLine.hasOption("d"),
                    Integer.parseInt(commandLine.getOptionValue("t")), clusterServers,
                    (commandLine.hasOption("ci")) ? Integer.parseInt(commandLine.getOptionValue("ci")) : 300,
                    (commandLine.hasOption("si")) ? Integer.parseInt(commandLine.getOptionValue("si")) : -1);
        } else {
            System.out.println(
                    "Usage: java -jar japs-server.jar -h <Host> -p <Port> -b <Backlog> -t <Threads> [-c IP:Port IP:Port] [-d]");
            System.out.println(
                    "Example (with debugging enabled): java -jar japs-server.jar -h localhost -p 1337 -b 100 -t 4 -d");
            System.out.println(
                    "Example (with debugging enabled and cluster setup): java -jar japs-server.jar -h localhost -p 1337 -b 100 -t 4 -c localhost:1338 -d");
            System.exit(-1);
        }
    } else {
        File configFile = new File("config.json");
        if (!configFile.exists()) {
            try {
                Files.copy(JaPS.class.getClassLoader().getResourceAsStream("config.json"), configFile.toPath(),
                        StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException e) {
                System.err.println("Unable to load default config!");
                System.exit(-1);
            }
        }

        try {
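            // Read the whole config file into one string by joining its lines with spaces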
            config = new Gson().fromJson(
                    Files.lines(configFile.toPath()).map(String::toString).collect(Collectors.joining(" ")),
                    Config.class);
        } catch (IOException e) {
            System.err.println("Unable to load 'config.json' in current directory!");
            System.exit(-1);
        }
    }

    if (config == null) {
        System.err.println("Failed to create a Config!");
        System.err.println("Please check the program parameters or the 'config.json' file!");
    } else {
        System.err.println("Using Config: " + config);

        JaPS jaPS = new JaPS(config);
        jaPS.init();
        jaPS.start();
        jaPS.stop();
    }
}

From source file:com.ikanow.aleph2.example.external_harvester.services.ExternalProcessLaunchService.java

public static void main(String[] args) throws InstantiationException, IllegalAccessException,
        ClassNotFoundException, JsonProcessingException, IOException, InterruptedException, ExecutionException {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    // Get the context (unused here)

    final IHarvestContext context = ContextUtils.getHarvestContext(args[0]);

    final DataBucketBean bucket = context.getBucket().get();

    _logger.info("Launched context, eg bucket status = : "
            + BeanTemplateUtils.toJson(context.getBucketStatus(Optional.empty()).get()));
    _logger.info("Retrieved bucket from CON: " + BeanTemplateUtils.toJson(bucket));

    // Get the bucket (unused here)

    _logger.info("Retrieved arg from CLI: " + args[1]);

    // Check that joins the cluster if I request the data bucket store
    //context.getService(IManagementDbService.class, Optional.of("core_management_db")).get().getDataBucketStore();
    //(But not if it's in read only mode)
    final IManagementCrudService<DataBucketBean> bucket_service = context.getServiceContext()
            .getCoreManagementDbService().readOnlyVersion().getDataBucketStore();
    _logger.info("Getting Management DB and reading number of buckets = "
            + bucket_service.countObjects().get().intValue());

    // Demonstration of accessing (read only) library state information:

    final Tuple2<SharedLibraryBean, Optional<GlobalConfigBean>> lib_config = ExternalProcessHarvestTechnology
            .getConfig(context);
    _logger.info("Retrieved library configuration: "
            + lib_config._2().map(g -> BeanTemplateUtils.toJson(g).toString()).orElse("(no config)"));

    // 1) Preferred method of getting per library state: 
    final ICrudService<ProcessInfoBean> pid_crud = context
            .getGlobalHarvestTechnologyObjectStore(ProcessInfoBean.class, ProcessInfoBean.PID_COLLECTION_NAME);
    // 2) Lower level way:
    //final IManagementDbService core_db = context.getServiceContext().getCoreManagementDbService();
    //final ICrudService<ProcessInfoBean> pid_crud = core_db.getPerLibraryState(ProcessInfoBean.class, lib_config._1(), ProcessInfoBean.PID_COLLECTION_NAME);
    // 3) Alternatively (this construct is how you get per bucket state also):
    //final ICrudService<ProcessInfoBean> pid_crud = context.getBucketObjectStore(ProcessInfoBean.class, Optional.empty(), ProcessInfoBean.PID_COLLECTION_NAME, Optional.of(AssetStateDirectoryBean.StateDirectoryType.library));

    lib_config._2().ifPresent(gc -> {
        if (gc.store_pids_in_db())
            pid_crud.getObjectsBySpec(CrudUtils.allOf(ProcessInfoBean.class).when(ProcessInfoBean::bucket_name,
                    bucket.full_name())).thenAccept(cursor -> {
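                        // Join the PIDs from the cursor into one comma-separated string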
                        String pids = StreamSupport.stream(cursor.spliterator(), false).map(c -> c._id())
                                .collect(Collectors.joining(","));
                        _logger.info("Pids/hostnames for this bucket: " + pids);
                    }).exceptionally(err -> {
                        _logger.error("Failed to get bucket pids", err);
                        return null;
                    });
    });

    // Just run for 10 minutes as an experiment
    for (int i = 0; i < 60; ++i) {
        // Example of promoting data to next stage
        if ((MasterEnrichmentType.streaming == bucket.master_enrichment_type())
                || (MasterEnrichmentType.streaming_and_batch == bucket.master_enrichment_type())) {
            // Send an object to kafka
            final JsonNode json = mapper.createObjectNode().put("@timestamp", new Date().getTime())
                    .put("test_str", "test" + i).put("test_int", i);
            _logger.info("Sending object to kafka: " + json);
            context.sendObjectToStreamingPipeline(Optional.empty(), Either.left(json));
        }
        _logger.info("(sleeping: " + i + ")");
        try {
            Thread.sleep(10L * 1000L);
        } catch (Exception e) {
        }
    }
}

From source file:com.ikanow.aleph2.enrichment.utils.services.JsScriptEngineTestService.java

/** Entry point
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {
    if (args.length < 3) {
        System.out
                .println("ARGS: <script-file> <input-file> <output-prefix> [{[len: <LEN>], [group: <GROUP>]}]");
    }

    // STEP 1: load script file

    final String user_script = Files.toString(new File(args[0]), Charsets.UTF_8);

    // STEP 2: get a stream for the JSON file

    final InputStream io_stream = new FileInputStream(new File(args[1]));

    // STEP 3: set up control if applicable

    Optional<JsonNode> json = Optional.of("").filter(__ -> args.length > 3).map(__ -> args[3])
            .map(Lambdas.wrap_u(j -> _mapper.readTree(j)));

    // STEP 4: set up the various objects

    final DataBucketBean bucket = Mockito.mock(DataBucketBean.class);

    final JsScriptEngineService service_under_test = new JsScriptEngineService();

    final LinkedList<ObjectNode> emitted = new LinkedList<>();
    final LinkedList<JsonNode> grouped = new LinkedList<>();
    final LinkedList<JsonNode> externally_emitted = new LinkedList<>();

    final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class, new Answer<Void>() {
        @SuppressWarnings("unchecked")
        public Void answer(InvocationOnMock invocation) {
            try {
                Object[] args = invocation.getArguments();
                if (invocation.getMethod().getName().equals("emitMutableObject")) {
                    final Optional<JsonNode> grouping = (Optional<JsonNode>) args[3];
                    if (grouping.isPresent()) {
                        grouped.add(grouping.get());
                    }
                    emitted.add((ObjectNode) args[1]);
                } else if (invocation.getMethod().getName().equals("externalEmit")) {
                    final DataBucketBean to = (DataBucketBean) args[0];
                    final Either<JsonNode, Map<String, Object>> out = (Either<JsonNode, Map<String, Object>>) args[1];
                    externally_emitted
                            .add(((ObjectNode) out.left().value()).put("__a2_bucket", to.full_name()));
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }
    });

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::config,
                    new LinkedHashMap<String, Object>(
                            ImmutableMap.<String, Object>builder().put("script", user_script).build()))
            .done().get();

    service_under_test.onStageInitialize(context, bucket, control,
            Tuples._2T(ProcessingStage.batch, ProcessingStage.grouping), Optional.empty());

    final BeJsonParser json_parser = new BeJsonParser();

    // Run the file through

    final Stream<Tuple2<Long, IBatchRecord>> json_stream = StreamUtils
            .takeUntil(Stream.generate(() -> json_parser.getNextRecord(io_stream)), i -> null == i)
            .map(j -> Tuples._2T(0L, new BatchRecord(j)));

    service_under_test.onObjectBatch(json_stream, json.map(j -> j.get("len")).map(j -> (int) j.asLong(0L)),
            json.map(j -> j.get("group")));

    System.out.println("RESULTS: ");
    System.out.println("emitted: " + emitted.size());
    System.out.println("grouped: " + grouped.size());
    System.out.println("externally emitted: " + externally_emitted.size());
    Files.write(emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "emit.json"), Charsets.UTF_8);
    Files.write(grouped.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "group.json"), Charsets.UTF_8);
    Files.write(externally_emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "external_emit.json"), Charsets.UTF_8);
}

From source file:Main.java

public static String saveIntegerListToXml(Collection<Integer> list) {
    return list.stream().map(String::valueOf).collect(Collectors.joining(" "));
}
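
For example, saveIntegerListToXml(Arrays.asList(1, 2, 3)) returns "1 2 3".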