Example usage for java.util.stream Collectors joining

List of usage examples for java.util.stream Collectors joining

Introduction

On this page you can find example usage for java.util.stream Collectors.joining.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Source Link

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.

Usage

From source file:com.simiacryptus.util.lang.CodeUtil.java

/**
 * Gets heapCopy text./* w  ww.j  a v  a 2  s .c o  m*/
 *
 * @param callingFrame the calling frame
 * @return the heapCopy text
 * @throws IOException the io exception
 */
public static String getInnerText(@javax.annotation.Nonnull final StackTraceElement callingFrame)
        throws IOException {
    try {
        @javax.annotation.Nonnull
        final File file = com.simiacryptus.util.lang.CodeUtil.findFile(callingFrame);
        assert null != file;
        final int start = callingFrame.getLineNumber() - 1;
        final List<String> allLines = Files.readAllLines(file.toPath());
        final String txt = allLines.get(start);
        @javax.annotation.Nonnull
        final String indent = com.simiacryptus.util.lang.CodeUtil.getIndent(txt);
        @javax.annotation.Nonnull
        final ArrayList<String> lines = new ArrayList<>();
        for (int i = start + 1; i < allLines.size()
                && (com.simiacryptus.util.lang.CodeUtil.getIndent(allLines.get(i)).length() > indent.length()
                        || allLines.get(i).trim().isEmpty()); i++) {
            final String line = allLines.get(i);
            lines.add(line.substring(Math.min(indent.length(), line.length())));
        }
        return lines.stream().collect(Collectors.joining("\n"));
    } catch (@javax.annotation.Nonnull final Throwable e) {
        return "";
    }
}

From source file:eu.freme.common.rest.NIFParameterFactory.java

/**
 * Builds a validated {@code NIFParameterSet} from raw HTTP request values.
 * Each parameter is resolved from its query parameter first, then from the
 * corresponding HTTP header, then from a default (TURTLE serialization, the
 * configured default prefix, NIF version 2.0).
 *
 * @param input             the "input" query parameter; may be null
 * @param informat          the "informat" query parameter; may be null
 * @param outformat         the "outformat" query parameter; may be null
 * @param postBody          the raw request body, used as input fallback
 * @param acceptHeader      the Accept header, used as outformat fallback
 * @param contentTypeHeader the Content-Type header, used as informat fallback
 * @param prefix            the URI prefix; must be a valid http(s) URL if given
 * @param nifVersion        the requested NIF version; null defaults to 2.0
 * @param allowEmptyInput   when false, either input or postBody must be present
 * @return the fully resolved and validated parameter set
 * @throws BadRequestException when a required value is missing or cannot be
 *         mapped to a registered serialization format, or the prefix is invalid
 */
public NIFParameterSet constructFromHttp(String input, String informat, String outformat, String postBody,
        String acceptHeader, String contentTypeHeader, String prefix, String nifVersion,
        boolean allowEmptyInput) throws BadRequestException {

    // Input text: the explicit "input" parameter wins over the request body.
    String thisInput;
    if (!allowEmptyInput && input == null && postBody == null) {
        throw new BadRequestException("no input found in request");
    } else if (input != null) {
        thisInput = input;
    } else {
        thisInput = postBody;
    }

    // Input format: "informat" parameter, then Content-Type header, then TURTLE.
    String thisInformat;
    if (informat == null && contentTypeHeader == null) {
        thisInformat = TURTLE;
    } else if (informat != null) {
        thisInformat = serializationFormatMapper.get(informat);
        if (thisInformat == null) {
            throw new BadRequestException("parameter informat has invalid value \"" + informat
                    + "\". Please use one of the registered serialization format values: "
                    + serializationFormatMapper.keySet().stream().collect(Collectors.joining(", ")));
        }
    } else {
        // Content-Type may carry parameters ("; charset=..."); match only the media type.
        String[] contentTypeHeaderParts = contentTypeHeader.split(";");
        thisInformat = serializationFormatMapper.get(contentTypeHeaderParts[0]);
        if (thisInformat == null) {
            throw new BadRequestException("Content-Type header has invalid value \"" + contentTypeHeader
                    + "\". Please use one of the registered serialization format values: "
                    + serializationFormatMapper.keySet().stream().collect(Collectors.joining(", ")));
        }
    }
    // The mapped informat must be a known RDF serialization or plain text.
    if (!RDFConstants.SERIALIZATION_FORMATS.contains(thisInformat)
            && !thisInformat.equals(SerializationFormatMapper.PLAINTEXT)) {
        throw new UnsupportedRDFSerializationException("Parameter informat has invalid value \"" + thisInformat
                + "\". Please use one of: "
                + SERIALIZATION_FORMATS.stream()
                        .map(v -> MapUtils.invertMap(serializationFormatMapper).get(v).toString())
                        .collect(Collectors.joining(", "))
                + " or " + SerializationFormatMapper.PLAINTEXT);
    }

    // Output format: "outformat" parameter, then Accept header, then TURTLE.
    String thisOutformat;
    // A wildcard Accept header expresses no preference, so default it to TURTLE.
    if (acceptHeader != null && acceptHeader.equals("*/*")) {
        acceptHeader = TURTLE;
    }
    if (outformat == null && acceptHeader == null) {
        thisOutformat = TURTLE;
    } else if (outformat != null) {
        thisOutformat = serializationFormatMapper.get(outformat);
        if (thisOutformat == null) {
            throw new BadRequestException("Parameter outformat has invalid value \"" + outformat
                    + "\". Please use one of the registered serialization format values: "
                    + serializationFormatMapper.keySet().stream().collect(Collectors.joining(", ")));
        }
    } else {
        // Accept header may carry quality parameters; match only the media type.
        thisOutformat = serializationFormatMapper.get(acceptHeader.split(";")[0]);
        if (thisOutformat == null) {
            throw new BadRequestException("Accept header has invalid value \"" + acceptHeader.split(";")[0]
                    + "\". Please use one of the registered serialization format values: "
                    + serializationFormatMapper.keySet().stream().collect(Collectors.joining(", ")));
        }
    }
    // Unlike informat, the outformat must be an RDF serialization (no plain text).
    if (!RDFConstants.SERIALIZATION_FORMATS.contains(thisOutformat)) {
        throw new UnsupportedRDFSerializationException(
                "Parameter outformat has invalid value \"" + thisOutformat + "\". Please use one of: "
                        + SERIALIZATION_FORMATS.stream()
                                .map(v -> MapUtils.invertMap(serializationFormatMapper).get(v).toString())
                                .collect(Collectors.joining(", ")));
    }

    // Prefix: fall back to the configured default, then validate as an http(s) URL.
    String thisPrefix;
    if (prefix == null) {
        thisPrefix = getDefaultPrefix();
    } else {
        thisPrefix = prefix;
    }
    String[] schemes = { "http", "https" };
    UrlValidator urlValidator = new UrlValidator(schemes);
    if (!urlValidator.isValid(thisPrefix)) {
        throw new BadRequestException("invalid prefix");
    }

    // NIF version: default to 2.0; only 2.0 and 2.1 are supported.
    if (nifVersion == null) {
        nifVersion = RDFConstants.nifVersion2_0;
    } else if (!(nifVersion.equals(RDFConstants.nifVersion2_0)
            || nifVersion.equals(RDFConstants.nifVersion2_1))) {
        throw new NIFVersionNotSupportedException("NIF version \"" + nifVersion + "\" is not supported");
    }

    return new NIFParameterSet(thisInput, thisInformat, thisOutformat, thisPrefix, nifVersion);
}

From source file:com.kantenkugel.discordbot.jdocparser.JDocParser.java

/**
 * Parses one javadoc HTML page into a {@code ClassDocumentation} and registers
 * it in the given map, keyed by the lower-cased class name. Extracts the class
 * signature, package, description, method details, fields, enum constants and
 * the inherited-method summary, and wires up nested-class relationships
 * encoded in the file name (e.g. {@code Outer.Inner.html}).
 *
 * @param jdocBase    base URL of the javadoc site, used to build links
 * @param name        path of the HTML file within the javadoc tree
 * @param inputStream stream of the HTML content; always closed before returning
 * @param docs        accumulator mapping lower-cased class names to their docs
 */
static void parse(final String jdocBase, final String name, final InputStream inputStream,
        Map<String, ClassDocumentation> docs) {
    final String[] pathSplits = name.split("/");
    final String fileName = pathSplits[pathSplits.length - 1];
    if (!Character.isUpperCase(fileName.charAt(0))) {
        //ignore jdoc structure html
        return;
    }
    // "Outer.Inner.html" -> className "Inner", fullName "Outer.Inner".
    final String[] nameSplits = fileName.split("\\.");
    final String className = nameSplits[nameSplits.length - 2];
    final String fullName = fileName.substring(0,
            fileName.length() - nameSplits[nameSplits.length - 1].length() - 1);
    try (BufferedReader buffer = new BufferedReader(new InputStreamReader(inputStream))) {
        //create dom Document
        final String content = buffer.lines().collect(Collectors.joining("\n"));
        Document document = Jsoup.parse(content);

        //classDocument (classname, package, description)
        Element titleElem = getSingleElementByClass(document, "title");
        final String classSig = JDocUtil.fixSpaces(titleElem.text());
        Element packageElem = titleElem.previousElementSibling();
        if (packageElem.children().size() > 1) {
            packageElem = packageElem.children().last();
        }
        final String pack = JDocUtil.fixSpaces(packageElem.text());
        final String link = JDocUtil.getLink(jdocBase, pack, fullName);
        // Pick the description block, skipping deprecation notices when more
        // than one candidate exists.
        Element descriptionElement = null;
        Elements descriptionCandidates = document.select(".description .block");
        if (descriptionCandidates.size() > 1) {
            List<Element> removed = descriptionCandidates.stream().map(elem -> elem.child(0))
                    .filter(child -> child != null && !child.className().startsWith("deprecat"))
                    .map(Element::parent).collect(Collectors.toList());
            if (removed.size() != 1)
                throw new RuntimeException("Found too many description candidates");
            descriptionElement = removed.get(0);
        } else if (descriptionCandidates.size() == 1) {
            descriptionElement = descriptionCandidates.get(0);
        }
        final String description = descriptionElement == null ? ""
                : JDocUtil.formatText(descriptionElement.html(), link);
        final ClassDocumentation classDoc = new ClassDocumentation(pack, fullName, classSig, description,
                classSig.startsWith("Enum"));

        //methods, fields
        final Element details = document.getElementsByClass("details").first();
        if (details != null) {
            //methods
            Element tmp = getSingleElementByQuery(details, "a[name=\"method.detail\"]");
            List<DocBlock> docBlock = getDocBlock(jdocBase, tmp, classDoc);
            if (docBlock != null) {
                for (DocBlock block : docBlock) {
                    Set<MethodDocumentation> mdocs = classDoc.methodDocs
                            .computeIfAbsent(block.title.toLowerCase(), key -> new HashSet<>());
                    mdocs.add(new MethodDocumentation(classDoc, block.signature, block.hashLink,
                            block.description, block.fields));
                }
            }
            //vars
            tmp = getSingleElementByQuery(details, "a[name=\"field.detail\"]");
            docBlock = getDocBlock(jdocBase, tmp, classDoc);
            if (docBlock != null) {
                for (DocBlock block : docBlock) {
                    classDoc.classValues.put(block.title.toLowerCase(), new ValueDocumentation(classDoc,
                            block.title, block.hashLink, block.signature, block.description));
                }
            }
            //enum-values
            tmp = getSingleElementByQuery(details, "a[name=\"enum.constant.detail\"]");
            docBlock = getDocBlock(jdocBase, tmp, classDoc);
            if (docBlock != null) {
                for (DocBlock block : docBlock) {
                    classDoc.classValues.put(block.title.toLowerCase(), new ValueDocumentation(classDoc,
                            block.title, block.hashLink, block.signature, block.description));
                }
            }
        }
        final Element methodSummary = getSingleElementByQuery(document, "a[name=\"method.summary\"]");
        classDoc.inheritedMethods.putAll(getInheritedMethods(methodSummary));

        //storing
        // For nested classes, create/descend placeholder parents and attach
        // this classDoc under the outermost class' subClasses chain.
        if (nameSplits.length > 2) {
            if (!docs.containsKey(nameSplits[0].toLowerCase()))
                docs.put(nameSplits[0].toLowerCase(), new ClassDocumentation(null, null, null, null, false));
            ClassDocumentation parent = docs.get(nameSplits[0].toLowerCase());
            for (int i = 1; i < nameSplits.length - 2; i++) {
                if (!parent.subClasses.containsKey(nameSplits[i].toLowerCase()))
                    parent.subClasses.put(nameSplits[i].toLowerCase(),
                            new ClassDocumentation(null, null, null, null, false));
                parent = parent.subClasses.get(nameSplits[i].toLowerCase());
            }
            // Preserve sub-classes already registered under a placeholder.
            if (parent.subClasses.containsKey(className.toLowerCase()))
                classDoc.subClasses.putAll(parent.subClasses.get(className.toLowerCase()).subClasses);
            parent.subClasses.put(className.toLowerCase(), classDoc);
        }
        if (docs.containsKey(fullName.toLowerCase())) {
            ClassDocumentation current = docs.get(fullName.toLowerCase());
            // A non-null classSig means a real (non-placeholder) entry exists.
            if (current.classSig != null)
                throw new RuntimeException("Got a class-name conflict with classes " + classDoc.classSig + "("
                        + classDoc.className + ") AND " + current.classSig + "(" + current.className + ")");
            classDoc.subClasses.putAll(current.subClasses);
        }
        docs.put(fullName.toLowerCase(), classDoc);
    } catch (final IOException | NullPointerException ex) {
        // Fixed typo in the log message ("excaption" -> "exception").
        JDocUtil.LOG.error("Got exception for element {}", fullName, ex);
    }
    try {
        inputStream.close();
    } catch (final IOException e) {
        JDocUtil.LOG.error("Error closing inputstream", e);
    }
}

From source file:com.epam.catgenome.manager.vcf.VcfFileManager.java

/**
 * Loads the {@code VcfFile} entities for the given ids.
 *
 * @param ids ids of the VCF files to load; may be null or empty
 * @return the loaded files, or an empty list for a null/empty id list
 * @throws IllegalArgumentException via {@code Assert.isTrue} when one or more
 *         requested ids were not found
 */
@Transactional(propagation = Propagation.REQUIRED)
public List<VcfFile> loadVcfFiles(List<Long> ids) {
    if (CollectionUtils.isEmpty(ids)) {
        return Collections.emptyList();
    }

    List<VcfFile> files = vcfFileDao.loadVcfFiles(ids);
    if (files.size() != ids.size()) {
        // Report exactly which requested ids the DAO did not return.
        List<Long> notFound = new ArrayList<>(ids);
        notFound.removeAll(files.stream().map(BaseEntity::getId).collect(Collectors.toList()));
        Assert.isTrue(notFound.isEmpty(), MessageHelper.getMessage(MessagesConstants.ERROR_FILE_NOT_FOUND,
                notFound.stream().map(Object::toString).collect(Collectors.joining(", "))));
    }

    return files;
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchHadoopUtils.java

/**
 * Builds an {@code IAnalyticsAccessContext} exposing the Hadoop
 * {@code InputFormat} implementation ({@code Aleph2EsInputFormat}) together
 * with the es-hadoop configuration needed to read the job input's
 * Elasticsearch indexes.
 *
 * @param client    the Elasticsearch client used to resolve index/type mappings
 * @param job_input the analytic job input describing the resource to read
 * @return an access context whose config is built lazily on first access
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IAnalyticsAccessContext<InputFormat> getInputFormat(final Client client,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input) {
    return new IAnalyticsAccessContext<InputFormat>() {
        // Lazily built and cached by getAccessConfig(); null until first call.
        private LinkedHashMap<String, Object> _mutable_output = null;

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(), this.getAccessConfig().get());
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @Override
        public Either<InputFormat, Class<InputFormat>> getAccessService() {
            return Either.right((Class<InputFormat>) (Class<?>) Aleph2EsInputFormat.class);
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessConfig()
         */
        @Override
        public Optional<Map<String, Object>> getAccessConfig() {
            // Return the previously built config if present.
            if (null != _mutable_output) {
                return Optional.of(_mutable_output);
            }
            _mutable_output = new LinkedHashMap<>();

            // Check for input record limit:
            Optional.ofNullable(job_input.config()).map(cfg -> cfg.test_record_limit_request()).ifPresent(
                    max -> _mutable_output.put(Aleph2EsInputFormat.BE_DEBUG_MAX_SIZE, Long.toString(max)));

            // Index pattern: read-alias prefix + base index name for the
            // input's bucket + trailing wildcard.
            final String index_resource = ElasticsearchContext.READ_PREFIX
                    + ElasticsearchIndexUtils.getBaseIndexName(BeanTemplateUtils.build(DataBucketBean.class)
                            .with(DataBucketBean::full_name, job_input.resource_name_or_id()).done().get(),
                            Optional.empty())
                    + "*";

            //TODO (ALEPH-72): support multi-buckets / buckets with non-standard indexes ... also use the tmin/tmax
            // (needs MDB to pull out - because need to get the full bucket ugh)

            // Currently need to add types: 
            //TODO (ALEPH-72): 2.2.0 you _can_ just put "indexes/" to get all types - that doesn't work for all es-hadoop code though
            final Multimap<String, String> index_type_mapping = ElasticsearchIndexUtils.getTypesForIndex(client,
                    index_resource);
            // De-duplicate the mapping's type names (via a Set) before joining.
            final String type_resource = index_type_mapping.values().stream().collect(Collectors.toSet())
                    .stream().collect(Collectors.joining(","));
            // Narrow the index list by time slicing when possible, keeping any
            // untimed directories; fall back to the wildcard pattern.
            final String final_index = getTimedIndexes(job_input, index_type_mapping,
                    new Date())
                            .map(s -> Stream
                                    .concat(s,
                                            TimeSliceDirUtils.getUntimedDirectories(
                                                    index_type_mapping.keySet().stream()))
                                    .collect(Collectors.joining(",")))
                            .orElse(index_resource);

            _mutable_output.put("es.resource", final_index + "/" + type_resource);

            _mutable_output.put("es.read.metadata", "true");
            _mutable_output.put("es.read.metadata.field", Aleph2EsInputFormat.ALEPH2_META_FIELD);

            _mutable_output.put("es.index.read.missing.as.empty", "yes");

            // es.query accepts either stringified JSON or a "?q=..." query string.
            _mutable_output.put("es.query",
                    Optional.ofNullable(job_input.filter()).map(f -> f.get("technology_override")).map(o -> {
                        return (o instanceof String) ? o.toString()
                                : _mapper.convertValue(o, JsonNode.class).toString();
                    }).orElse("?q=*"));
            //TODO (ALEPH-72) (incorporate tmin/tmax and also add a JSON mapping for the Aleph2 crud utils)

            // Here are the parameters that can be set:
            // es.query ... can be stringified JSON or a q=string .... eg conf.set("es.query", "?q=me*");  
            //config.set("es.resource", overallIndexNames.toString()); .. .this was in the format X,Y,Z[/type],,etc which then got copied to 
            // create a simple multi-input format .. looks like i didn't do anything if no type was set, unclear if that was an optimization
            // or if it doesn't work... (if it doesn't work then what you have to do is get the mappings for each index and
            // get the types and insert them all)
            //config.set("es.index.read.missing.as.empty", "yes");

            // (not sure if need to set just normal http port/host?)
            //config.set("es.net.proxy.http.host", "localhost");
            //config.set("es.net.proxy.http.port", "8888");

            return Optional.of(Collections.unmodifiableMap(_mutable_output));
        }
    };
}

From source file:ddf.lib.OwaspDiffRunner.java

/**
 * Determines which Maven module directories contain a pom.xml that changed on
 * the current git branch relative to master, and returns them as a
 * comma-separated string (the root pom becomes the path separator, e.g. "/").
 *
 * @return comma-joined module paths of changed poms; "" when none changed
 * @throws OwaspDiffRunnerException if the git branch name or diff cannot be read
 */
private static String getModulesOfChangedPoms() throws OwaspDiffRunnerException {
    String changedFiles;
    String currentBranchName;

    try {
        // NOTE(review): IOUtils.toString without an explicit charset uses the
        // platform default encoding; consider StandardCharsets.UTF_8. The
        // process streams are also never closed explicitly — confirm that is
        // acceptable for this short-lived build tool.
        currentBranchName = IOUtils.toString(runTime.exec(GIT_SHORT_BRANCH_NAME_COMMAND).getInputStream())
                .replace(File.separator, "").replace(System.getProperty("line.separator"), "");

        changedFiles = IOUtils.toString(
                runTime.exec(GIT_DIFF_NAME_COMMAND + currentBranchName + "..master").getInputStream());
    } catch (IOException e) {
        throw new OwaspDiffRunnerException(OwaspDiffRunnerException.UNABLE_TO_RETRIEVE_GIT_INFO, e);
    }

    System.out.println("Comparing commits of branch " + currentBranchName + " to master. Changed poms: ");

    // Keep only changed pom.xml paths, echo each one, strip the trailing
    // "/pom.xml" to obtain the module directory, and join with commas.
    return Arrays.stream(changedFiles.split(System.getProperty("line.separator")))
            .filter(path -> path.endsWith("pom.xml")).peek(System.out::println)
            .map(path -> path.endsWith(File.separator + "pom.xml")
                    ? path.replace(File.separator + "pom.xml", "")
                    : path.replace("pom.xml", File.separator)) //Special case for the root pom, change path pom.xml -> /
            .collect(Collectors.joining(","));
}

From source file:com.liferay.dynamic.data.lists.form.web.internal.converter.DDLFormRuleToDDMFormRuleConverter.java

/**
 * Converts a DDL rule condition operand into its DDM expression string.
 * Field operands become a {@code getValue("<field>")} call; numeric literals
 * pass through unchanged; any other value is treated as a comma-separated
 * list whose elements are trimmed, quoted and rejoined.
 *
 * @param operand the rule condition operand to convert
 * @return the DDM expression fragment for the operand
 */
protected String convertOperand(DDLFormRuleCondition.Operand operand) {
    // Field references are rendered as a unary getValue(...) function call.
    if (Objects.equals("field", operand.getType())) {
        return String.format(_functionCallUnaryExpressionFormat, "getValue",
                StringUtil.quote(operand.getValue()));
    }

    String rawValue = operand.getValue();

    // Plain numbers need no quoting at all.
    if (NumberUtils.isNumber(rawValue)) {
        return rawValue;
    }

    // Split on commas, trim and quote each piece, then rejoin with ", ".
    String[] parts = StringUtil.split(rawValue);

    StringBuilder joined = new StringBuilder();

    for (int i = 0; i < parts.length; i++) {
        if (i > 0) {
            joined.append(StringPool.COMMA_AND_SPACE);
        }

        joined.append(StringUtil.quote(StringUtil.trim(parts[i])));
    }

    return joined.toString();
}

From source file:org.springframework.cloud.config.server.environment.HttpClientConfigurableHttpConnectionFactory.java

/**
 * Finds the custom {@code HttpClientBuilder} whose registered repo URL
 * template matches the given URL. The URL (with placeholders substituted) is
 * compared against each template, then against successively shorter prefixes
 * of itself, trimming one "/"-delimited segment from the right each time.
 *
 * @param url the git repository URL being connected to
 * @return the single matching builder; a fresh default builder when no
 *         template matches, or when more than one matches (logged as an error)
 */
private HttpClientBuilder lookupHttpClientBuilder(final URL url) {
    Map<String, HttpClientBuilder> builderMap = this.httpClientBuildersByUri.entrySet().stream()
            .filter(entry -> {
                String key = entry.getKey();
                String spec = getUrlWithPlaceholders(url, key);
                if (spec.equals(key)) {
                    return true;
                }
                // Trim one path segment at a time and retry the comparison,
                // so templates matching a URL prefix are also found.
                int index = spec.lastIndexOf("/");
                while (index != -1) {
                    spec = spec.substring(0, index);
                    if (spec.equals(key)) {
                        return true;
                    }
                    index = spec.lastIndexOf("/");
                }
                return false;
            }).collect(toMap(Map.Entry::getKey, Map.Entry::getValue));

    if (builderMap.isEmpty()) {
        this.log.warn(String.format("No custom http config found for URL: %s", url));
        return HttpClients.custom();
    }
    if (builderMap.size() > 1) {
        // Ambiguous match: fall back to defaults rather than guessing.
        this.log.error(String.format(
                "More than one git repo URL template matched URL:"
                        + " %s, proxy and skipSslValidation config won't be applied. Matched templates: %s",
                url, String.join(", ", builderMap.keySet())));
        return HttpClients.custom();
    }
    // Exactly one match; no need to copy the values into a list to fetch it.
    return builderMap.values().iterator().next();
}

From source file:net.jodah.failsafe.internal.actions.ActionRegistry.java

/**
 * Verifies that every expected action which must run to completion has in
 * fact completed.
 *
 * @throws AssertionError if recorded expectations remain unexecuted
 */
public void verify() {
    synchronized (controller) {
        // Let each in-flight expectation validate its own completion state.
        for (Expectation expectation : processing) {
            expectation.verify();
        }
        if (expectations.isEmpty()) {
            return;
        }
        // Some recorded expectations never ran: report them all at once.
        final int totalExpected = expectations.size() + processing.size();
        final String unexecuted = expectations.stream().map(Expectation::toString)
                .collect(Collectors.joining(ActionRegistry.NL_AND_INDENT));
        throw controller.setFailure(new AssertionError(String.format(
                "expecting %d executions for `%s` but only %d occurred; the following were not executed: %n\t%s%n",
                totalExpected, controller, processing.size(), unexecuted)));
    }
}

From source file:com.openthinks.webscheduler.model.security.User.java

/**
 * Joins the ids of all roles assigned to this user into a single string,
 * delimited by {@code StaticDict.PAGE_PARAM_LIST_JOIN}.
 *
 * @return the delimiter-joined role ids; "" when the user has no roles
 */
public String getJoinedKeys() {
    // Expression lambda replaces the block-bodied one; the intermediate
    // local variable added nothing and is removed.
    return getRoles().stream().map(role -> role.getId())
            .collect(Collectors.joining(StaticDict.PAGE_PARAM_LIST_JOIN));
}