Example usage for java.util.stream Collectors.joining

List of usage examples for java.util.stream Collectors.joining

Introduction

On this page you can find example usages of java.util.stream.Collectors.joining.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
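
For orientation, here is a minimal, self-contained sketch of this overload (the class name and data are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class JoiningDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("alpha", "beta", "gamma"); // illustrative data
        // Elements are concatenated in encounter order, separated by the delimiter.
        String joined = words.stream().collect(Collectors.joining(", "));
        System.out.println(joined); // alpha, beta, gamma
    }
}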

Usage

From source file:com.yahoo.elide.core.hibernate.hql.AbstractHQLQueryBuilder.java

/**
 * Extracts all the HQL JOIN clauses from the given filter expression.
 * @param filterExpression the filter expression to extract join clauses from
 * @return the HQL join clauses, separated by spaces
 */
protected String getJoinClauseFromFilters(FilterExpression filterExpression) {
    PredicateExtractionVisitor visitor = new PredicateExtractionVisitor(new ArrayList<>());
    Collection<FilterPredicate> predicates = filterExpression.accept(visitor);

    Set<String> alreadyJoined = new HashSet<>();

    return predicates.stream().map(predicate -> extractJoinClause(predicate, alreadyJoined))
            .collect(Collectors.joining(SPACE));
}

From source file:com.ikanow.aleph2.harvest.script.utils.ScriptUtils.java

/**
 * Creates a ProcessBuilder pointed at the given script path and adds the working directory and environment variables for you.
 * Just runs a process that does "sh <script_file_path>"
 * @param script_file_path
 * @param working_dir
 * @return the configured ProcessBuilder
 * @throws JsonProcessingException 
 * @throws ExecutionException 
 * @throws InterruptedException 
 */
public static ProcessBuilder createProcessBuilderForScriptFile(final String script_file_path,
        final String working_dir, final Optional<Long> test_requested_num_objects,
        final Optional<Long> test_max_runtime_s, final Map<String, String> user_args,
        final IHarvestContext context, final DataBucketBean bucket, final String aleph_global_root_path)
        throws JsonProcessingException, InterruptedException, ExecutionException {
    _logger.debug("create pb for script file: " + script_file_path);

    ArrayList<String> args = new ArrayList<String>();
    args.add("sh");
    args.add(script_file_path);
    final ProcessBuilder pb = new ProcessBuilder(args);
    pb.directory(new File(working_dir)).redirectErrorStream(true);
    pb.environment().put("JAVA_OPTS", "");
    if (test_requested_num_objects.isPresent())
        pb.environment().put(ENV_TEST_NUM_OBJ, test_requested_num_objects.get().toString());
    if (test_max_runtime_s.isPresent())
        pb.environment().put(ENV_TEST_MAX_RUNTIME_S, test_max_runtime_s.get().toString());
    //add in default env vars
    final String classpath = Stream
            .concat(context.getHarvestContextLibraries(Optional.empty()).stream(),
                    context.getHarvestLibraries(Optional.of(bucket)).get().values().stream())
            .collect(Collectors.joining(":"));
    pb.environment().put(ENV_MODULE_PATH,
            context.getHarvestContextLibraries(Optional.empty()).stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_LIBRARY_PATH, context.getHarvestLibraries(Optional.of(bucket)).get().values()
            .stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_CLASS_PATH, classpath);
    pb.environment().put(ENV_BUCKET_HDFS_PATH, aleph_global_root_path + "/data" + bucket.full_name());
    pb.environment().put(ENV_BUCKET_SIGNATURE,
            BucketUtils.getUniqueSignature(bucket.full_name(), Optional.empty()));
    pb.environment().put(ENV_BUCKET_PATH, bucket.full_name());
    pb.environment().put(ENV_BUCKET_STR, BeanTemplateUtils.toJson(bucket).toString());
    //add user args   as env vars
    user_args.forEach((k, val) -> pb.environment().put(k, val));
    return pb;
}
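
The classpath assembly above is the core joining pattern in this example. A stripped-down sketch follows; the paths are hypothetical stand-ins for the context and harvest libraries, and it joins with File.pathSeparator where the original hard-codes ":":

import java.io.File;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ClasspathJoinDemo {
    public static void main(String[] args) {
        // Hypothetical library paths standing in for the context and harvest libraries.
        Stream<String> contextLibs = Stream.of("/opt/libs/context.jar");
        Stream<String> harvestLibs = Stream.of("/opt/libs/harvest-a.jar", "/opt/libs/harvest-b.jar");
        // Concatenate both sources and join them into a single classpath string.
        String classpath = Stream.concat(contextLibs, harvestLibs)
                .collect(Collectors.joining(File.pathSeparator));
        System.out.println(classpath);
    }
}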

From source file:com._4dconcept.springframework.data.marklogic.core.cts.CTSQuerySerializer.java

private String serializeSortCriteriaList(List<SortCriteria> sortCriteriaList) {
    return sortCriteriaList.stream().map(this::asCtsOrder).collect(Collectors.joining(", "));
}

From source file:com.blackducksoftware.integration.hub.detect.detector.nuget.NugetInspectorExtractor.java

public Extraction extract(final File targetDirectory, File outputDirectory, NugetInspector inspector,
        final ExtractionId extractionId) {
    try {

        final List<String> options = new ArrayList<>(
                Arrays.asList("--target_path=" + targetDirectory.toString(),
                        "--output_directory=" + outputDirectory.getCanonicalPath(),
                        "--ignore_failure=" + detectConfiguration.getBooleanProperty(
                                DetectProperty.DETECT_NUGET_IGNORE_FAILURE, PropertyAuthority.None)));

        final String nugetExcludedModules = detectConfiguration
                .getProperty(DetectProperty.DETECT_NUGET_EXCLUDED_MODULES, PropertyAuthority.None);
        if (StringUtils.isNotBlank(nugetExcludedModules)) {
            options.add("--excluded_modules=" + nugetExcludedModules);
        }
        final String nugetIncludedModules = detectConfiguration
                .getProperty(DetectProperty.DETECT_NUGET_INCLUDED_MODULES, PropertyAuthority.None);
        if (StringUtils.isNotBlank(nugetIncludedModules)) {
            options.add("--included_modules=" + nugetIncludedModules);
        }
        final String[] nugetPackagesRepo = detectConfiguration
                .getStringArrayProperty(DetectProperty.DETECT_NUGET_PACKAGES_REPO_URL, PropertyAuthority.None);
        if (nugetPackagesRepo.length > 0) {
            final String packagesRepos = Arrays.asList(nugetPackagesRepo).stream()
                    .collect(Collectors.joining(","));
            options.add("--packages_repo_url=" + packagesRepos);
        }
        final String nugetConfigPath = detectConfiguration.getProperty(DetectProperty.DETECT_NUGET_CONFIG_PATH,
                PropertyAuthority.None);
        if (StringUtils.isNotBlank(nugetConfigPath)) {
            options.add("--nuget_config_path=" + nugetConfigPath);
        }
        if (logger.isTraceEnabled()) {
            options.add("-v");
        }

        final ExecutableOutput executableOutput = inspector.execute(targetDirectory, options);

        if (executableOutput.getReturnCode() != 0) {
            return new Extraction.Builder()
                    .failure(String.format("Executing command '%s' returned a non-zero exit code %s",
                            String.join(" ", options), executableOutput.getReturnCode()))
                    .build();
        }

        final List<File> dependencyNodeFiles = detectFileFinder.findFiles(outputDirectory,
                INSPECTOR_OUTPUT_PATTERN);

        final List<NugetParseResult> parseResults = new ArrayList<>();
        for (final File dependencyNodeFile : dependencyNodeFiles) {
            final NugetParseResult result = nugetInspectorPackager.createDetectCodeLocation(dependencyNodeFile);
            parseResults.add(result);
        }

        final List<DetectCodeLocation> codeLocations = parseResults.stream()
                .flatMap(it -> it.codeLocations.stream()).collect(Collectors.toList());

        if (codeLocations.isEmpty()) {
            logger.warn("Unable to extract any dependencies from nuget");
        }

        final Map<String, DetectCodeLocation> codeLocationsBySource = new HashMap<>();
        final DependencyGraphCombiner combiner = new DependencyGraphCombiner();

        codeLocations.stream().forEach(codeLocation -> {
            final String sourcePathKey = codeLocation.getSourcePath().toLowerCase();
            if (codeLocationsBySource.containsKey(sourcePathKey)) {
                logger.info(
                        "Multiple project code locations were generated for: " + targetDirectory.toString());
                logger.info("This most likely means the same project exists in multiple solutions.");
                logger.info(
                        "The code location's dependencies will be combined; in the future they will exist separately for each solution.");
                final DetectCodeLocation destination = codeLocationsBySource.get(sourcePathKey);
                combiner.addGraphAsChildrenToRoot((MutableDependencyGraph) destination.getDependencyGraph(),
                        codeLocation.getDependencyGraph());
            } else {
                codeLocationsBySource.put(sourcePathKey, codeLocation);
            }
        });

        final List<DetectCodeLocation> uniqueCodeLocations = codeLocationsBySource.values().stream()
                .collect(Collectors.toList());

        final Extraction.Builder builder = new Extraction.Builder().success(uniqueCodeLocations);
        final Optional<NugetParseResult> project = parseResults.stream()
                .filter(it -> StringUtils.isNotBlank(it.projectName)).findFirst();
        if (project.isPresent()) {
            builder.projectName(project.get().projectName);
            builder.projectVersion(project.get().projectVersion);
        }
        return builder.build();
    } catch (final Exception e) {
        return new Extraction.Builder().exception(e).build();
    }
}

From source file:com.globocom.grou.report.ReportService.java

public void send(Test test) throws Exception {
    final AtomicReference<List<Throwable>> exceptions = new AtomicReference<>(new ArrayList<>());
    final Map<String, Double> report = getReport(test);
    final HashMap<String, Double> reportSanitized = sanitizeKeyName(report);
    test.setResult(reportSanitized);
    testRepository.save(test);
    test.getNotify().forEach(notify -> {
        try {
            if (VALID_EMAIL_ADDRESS_REGEX.matcher(notify).matches()) {
                notifyByMail(test, notify.replaceAll("^mailto:[/]{0,2}", ""), report);
            } else if (VALID_HTTP_ADDRESS_REGEX.matcher(notify).matches()) {
                notifyByHttp(test, notify);
            } else {
                throw new UnsupportedOperationException("notify destination unsupported: " + notify);
            }
        } catch (Exception e) {
            exceptions.get().add(e);
        }
    });
    String exceptionsStr = exceptions.get().stream().map(Throwable::getMessage)
            .collect(Collectors.joining(" "));
    if (!exceptionsStr.isEmpty()) {
        throw new IllegalStateException(exceptionsStr);
    }
}

From source file:org.smigo.user.MailHandler.java

public void sendReviewRequest(String title, List<?> current, Object edit, AuthenticatedUser user) {
    User u = user == null ? null : userHandler.getUserById(user.getId());
    String indentation = "  ";
    StringBuilder sb = new StringBuilder(title);
    sb.append(System.lineSeparator());
    sb.append("----------------------------------------------");
    sb.append(System.lineSeparator()).append(System.lineSeparator());
    sb.append("Current value(s):").append(System.lineSeparator());
    sb.append(indentation).append(current.stream().map(Object::toString)
            .collect(Collectors.joining(System.lineSeparator() + indentation)));
    sb.append(System.lineSeparator()).append(System.lineSeparator());
    sb.append("Changing value:").append(System.lineSeparator());
    sb.append(indentation).append(edit);
    sb.append(System.lineSeparator()).append(System.lineSeparator());
    sb.append("Requested by user:").append(System.lineSeparator());
    sb.append(indentation).append(u);
    sendAdminNotification("review request", sb.toString());
}
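
The joining call above carries the indentation trick: the delimiter is the line separator plus the indentation string, so every element after the first starts on its own indented line, while the caller prepends the indentation for the first element. A minimal sketch with illustrative values:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class IndentedJoinDemo {
    public static void main(String[] args) {
        List<String> values = Arrays.asList("first", "second", "third"); // illustrative data
        String indentation = "  ";
        // The delimiter carries both the line break and the indentation.
        String block = indentation + values.stream()
                .collect(Collectors.joining(System.lineSeparator() + indentation));
        System.out.println(block);
    }
}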

From source file:org.elasticsearch.http.DeprecationHttpIT.java

/**
 * Attempts to do a scatter/gather request that expects unique responses per sub-request.
 */
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19222")
public void testUniqueDeprecationResponsesMergedTogether() throws IOException {
    final String[] indices = new String[randomIntBetween(2, 5)];

    // add at least one document for each index
    for (int i = 0; i < indices.length; ++i) {
        indices[i] = "test" + i;

        // create indices with a single shard to reduce noise; the query only deprecates uniquely by index anyway
        assertTrue(prepareCreate(indices[i]).setSettings(Settings.builder().put("number_of_shards", 1)).get()
                .isAcknowledged());

        int randomDocCount = randomIntBetween(1, 2);

        for (int j = 0; j < randomDocCount; ++j) {
            index(indices[i], "type", Integer.toString(j), "{\"field\":" + j + "}");
        }
    }

    refresh(indices);

    final String commaSeparatedIndices = Stream.of(indices).collect(Collectors.joining(","));

    final String body = "{\"query\":{\"bool\":{\"filter\":[{\"" + TestDeprecatedQueryBuilder.NAME
            + "\":{}}]}}}";

    // trigger all index deprecations
    Response response = getRestClient().performRequest("GET", "/" + commaSeparatedIndices + "/_search",
            Collections.emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON));
    assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus()));

    final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders());
    final List<Matcher<String>> headerMatchers = new ArrayList<>(indices.length);

    for (String index : indices) {
        headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] index", (Object) index)));
    }

    assertThat(deprecatedWarnings, hasSize(headerMatchers.size()));
    for (Matcher<String> headerMatcher : headerMatchers) {
        assertThat(deprecatedWarnings, hasItem(headerMatcher));
    }
}
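
For a plain array of strings, as here, the joining collector is interchangeable with String.join; a small sketch of both forms (the index names are illustrative):

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CommaSeparatedDemo {
    public static void main(String[] args) {
        String[] indices = {"test0", "test1", "test2"}; // illustrative index names
        // Stream-based form, as in the test above.
        String viaCollector = Stream.of(indices).collect(Collectors.joining(","));
        // Equivalent shorthand when no mapping step is needed.
        String viaJoin = String.join(",", indices);
        System.out.println(viaCollector.equals(viaJoin)); // true
    }
}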

From source file:com.thinkbiganalytics.metadata.jobrepo.nifi.provenance.NifiBulletinExceptionExtractor.java

/**
 * Queries for bulletins posted by a component for the given flow files.
 *
 * @param flowFileIds the collection of flow file UUIDs to extract error messages from
 * @param afterId     if non-null and not -1, only bulletins with an id greater than this value are queried
 * @return a list of bulletin objects that were posted by the component to the flow files
 * @throws NifiConnectionException if NiFi cannot be queried
 */
public List<BulletinDTO> getErrorBulletinsForFlowFiles(Collection<String> flowFileIds, Long afterId)
        throws NifiConnectionException {
    List<BulletinDTO> bulletins;
    try {
        String regexPattern = flowFileIds.stream().collect(Collectors.joining("|"));
        if (afterId != null && afterId != -1L) {
            bulletins = nifiRestClient.getBulletinsMatchingMessage(regexPattern, afterId);
        } else {
            bulletins = nifiRestClient.getBulletinsMatchingMessage(regexPattern);
        }
        log.info("Query for {} bulletins returned {} results ", regexPattern, bulletins.size());
        if (bulletins != null && !bulletins.isEmpty()) {
            bulletins = bulletins.stream()
                    .filter(bulletinDTO -> bulletinErrorLevels.contains(bulletinDTO.getLevel().toUpperCase()))
                    .collect(Collectors.toList());
        }

        return bulletins;
    } catch (NifiClientRuntimeException e) {
        if (e instanceof NifiConnectionException) {
            throw e;
        } else {
            log.error("Error getProcessorBulletinsForFlowFiles {}, {}", flowFileIds, e.getMessage());
        }
    }
    return null;
}
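
Joining the flow file IDs with "|" builds a regex alternation that matches any of them. That is safe for UUIDs, which contain no regex metacharacters; for arbitrary strings, quoting each element first would be the cautious variant, as in this sketch with illustrative IDs:

import java.util.Arrays;
import java.util.Collection;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class AlternationDemo {
    public static void main(String[] args) {
        Collection<String> flowFileIds = Arrays.asList("id-1", "id-2"); // illustrative IDs
        // Quote each ID so regex metacharacters are matched literally,
        // then join with "|" to form the alternation.
        String regex = flowFileIds.stream()
                .map(Pattern::quote)
                .collect(Collectors.joining("|"));
        System.out.println("id-2".matches(regex)); // true
    }
}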

From source file:com.ethercamp.harmony.web.controller.WebSocketController.java

@RequestMapping(value = "/logs", method = RequestMethod.GET)
@ResponseBody
public String listLogFiles() {
    final File logsLocation = new File(getLogsDir());
    final File[] files = logsLocation.listFiles();
    if (files == null) {
        return "No logs found";
    }

    return "<html><body>"
            + Arrays.asList(files).stream().sorted()
                    .map(f -> "<a href='logs/" + f.getName() + "'>" + f.getName() + "</a> "
                            + readableFileSize(f.length()))
                    .collect(Collectors.joining("<br>"))

            + "</body></html>";
}

From source file:ac.simons.tweetarchive.web.ArchiveHandlingController.java

/**
 * As you can see, it gets nasty here...
 * <br>
 * Twitter4j doesn't offer an official way to parse Twitters JSON, so I
 * brute force my way into the twitter4j.StatusJSONImpl implementation of
 * Status.
 * <br>
 * And even if there was an official way, the JSON files inside the
 * official(!) Twitter archive differ from the API, even if they are said to
 * be identical. By the way, I'm not the only one, who
 * <a href="https://twittercommunity.com/t/why-does-twitter-json-archive-have-a-different-format-than-the-rest-api-1-1/35530">noticed
 * that</a>.
 * <br>
 * Furthermore, I didn't even bother to add error handling or tests.
 *
 * @param archive The uploaded archive
 * @param redirectAttributes redirect attributes used to flash a status message
 * @return Redirect to the index
 * @throws java.io.IOException
 * @throws twitter4j.JSONException
 */
@PostMapping
public String store(@NotNull final MultipartFile archive, final RedirectAttributes redirectAttributes)
        throws IOException, JSONException {
    try (final ZipInputStream archiv = new ZipInputStream(archive.getInputStream())) {
        ZipEntry entry;
        while ((entry = archiv.getNextEntry()) != null) {
            if (!entry.getName().startsWith("data/js/tweets/") || entry.isDirectory()) {
                continue;
            }
            log.debug("Reading archive entry {}...", entry.getName());
            final BufferedReader buffer = new BufferedReader(
                    new InputStreamReader(archiv, StandardCharsets.UTF_8));

            final String content = buffer.lines().skip(1).map(l -> {
                Matcher m = PATTERN_CREATED_AT.matcher(l);
                String rv = l;
                if (m.find()) {
                    try {
                        rv = m.replaceFirst(
                                "$1\"" + DATE_FORMAT_OUT.format(DATE_FORMAT_IN.parse(m.group(2))) + "\"");
                    } catch (ParseException ex) {
                        log.warn("Unexpected date format in twitter archive", ex);
                    }
                }
                return rv;
            }).collect(Collectors.joining("")).replaceAll("\"sizes\" : \\[.+?\\],", "\"sizes\" : {},");

            final JSONArray statuses = new JSONArray(content);
            for (int i = 0; i < statuses.length(); ++i) {
                final JSONObject rawJSON = statuses.getJSONObject(i);
                // https://twitter.com/lukaseder/status/772772372990586882 ;)
                final Status status = statusFactory.create(rawJSON).as(Status.class);
                this.tweetStorageService.store(status, rawJSON.toString());
            }
        }
    }
    redirectAttributes.addFlashAttribute("message", "Done.");
    return "redirect:/upload";
}
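
Joining with the empty string, as the line-collecting step above does, simply concatenates the elements; the zero-argument overload Collectors.joining() behaves the same way. A minimal sketch with illustrative input:

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ConcatenateDemo {
    public static void main(String[] args) {
        Stream<String> lines = Stream.of("{\"id\":1,", "\"text\":\"hi\"}"); // illustrative lines
        // joining("") concatenates with no separator; Collectors.joining()
        // with no arguments is equivalent.
        String content = lines.collect(Collectors.joining(""));
        System.out.println(content); // {"id":1,"text":"hi"}
    }
}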