Example usage for java.util.stream Collectors joining

Introduction

This page collects usage examples for java.util.stream.Collectors.joining.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
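
A minimal sketch of the collector itself (values are illustrative):

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class JoiningExample {
    public static void main(String[] args) {
        // Concatenates the elements in encounter order, separated by ", "
        String joined = Stream.of("alpha", "beta", "gamma")
                .collect(Collectors.joining(", "));
        System.out.println(joined); // alpha, beta, gamma
    }
}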

Usage

From source file:com.demonwav.mcdev.buildsystem.gradle.GradleBuildSystem.java

private void createRepositoriesOrDependencies(@NotNull Project project, @NotNull GroovyFile file,
        @NotNull String name, @NotNull List<String> expressions) {

    // Get the block so we can start working with it
    GrClosableBlock block = getClosableBlockByName(file, name);

    if (block == null) {
        return;
    }

    // Create a super expression with all the expressions tied together
    String expressionText = expressions.stream().collect(Collectors.joining("\n"));

    // We can't simply create each expression and add it to the file one by one: Groovy requires a new line
    // between method call expressions, and there is no way to insert that whitespace through the PSI API.
    // So instead we build the whole thing as one block and insert it into the closable block.
    GroovyFile fakeFile = GroovyPsiElementFactoryImpl.getInstance(project).createGroovyFile(expressionText,
            false, null);
    PsiElement last = block.getChildren()[block.getChildren().length - 1];
    block.addBefore(fakeFile, last);
}
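
When no per-element mapping is needed, String.join is an equivalent shorthand for the joining pattern above. A minimal sketch (a fragment for a method body, with illustrative expression values):

List<String> expressions = Arrays.asList("mavenCentral()", "jcenter()");
String expressionText = expressions.stream().collect(Collectors.joining("\n"));
// Equivalent shorthand when no mapping step is required:
String sameText = String.join("\n", expressions);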

From source file:cz.lbenda.dataman.db.ExportTableData.java

/** Write rows as fixed-width text
 * @param sqlQueryRows rows to write
 * @param writer writer to which the data are written */
public static void writeSqlQueryRowsToTXT(SQLQueryRows sqlQueryRows, Writer writer) throws IOException {
    String joined = sqlQueryRows.getMetaData().getColumns().stream()
            .map(cd -> fixedString(cd.getName(), cd.getSize())).collect(Collectors.joining(""));
    writer.append(joined).append(Constants.CSV_NEW_LINE_SEPARATOR);
    for (RowDesc row : sqlQueryRows.getRows()) {
        joined = sqlQueryRows.getMetaData().getColumns().stream()
                .map(cd -> fixedString(row.getColumnValueStr(cd), cd.getSize()))
                .collect(Collectors.joining(""));
        writer.append(joined).append(Constants.CSV_NEW_LINE_SEPARATOR);
        writer.flush();
    }
}
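
With an empty delimiter, joining("") simply concatenates the elements; the no-argument Collectors.joining() is equivalent. A sketch of the padding idea, using a hypothetical right-padding helper (the real fixedString in the file above is not shown):

// Hypothetical helper, assumed for illustration only
static String fixedString(String value, int size) {
    return String.format("%-" + size + "s", value == null ? "" : value);
}

String header = Stream.of("id", "name")
        .map(h -> fixedString(h, 10))
        .collect(Collectors.joining()); // same result as joining("")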

From source file:com.haulmont.cuba.core.sys.FetchGroupManager.java

private void applyView(JpaQuery query, String queryString, AttributeGroup attrGroup, View view,
        boolean singleResultExpected) {

    boolean useFetchGroup = attrGroup instanceof FetchGroup;

    Set<FetchGroupField> fetchGroupFields = new LinkedHashSet<>();
    processView(view, null, fetchGroupFields, useFetchGroup);

    Set<String> fetchGroupAttributes = new TreeSet<>();
    Map<String, String> fetchHints = new TreeMap<>(); // sort hints by attribute path

    for (FetchGroupField field : fetchGroupFields) {
        fetchGroupAttributes.add(field.path());
    }
    if (attrGroup instanceof FetchGroup)
        ((FetchGroup) attrGroup).setShouldLoadAll(true);

    List<FetchGroupField> refFields = new ArrayList<>();
    for (FetchGroupField field : fetchGroupFields) {
        if (field.metaProperty.getRange().isClass() && !metadataTools.isEmbedded(field.metaProperty))
            refFields.add(field);
    }

    boolean hasBatches = false;

    MetaClass metaClass = metadata.getClassNN(view.getEntityClass());
    if (!refFields.isEmpty()) {
        String alias = QueryTransformerFactory.createParser(queryString).getEntityAlias();

        List<FetchGroupField> batchFields = new ArrayList<>();
        List<FetchGroupField> joinFields = new ArrayList<>();

        for (FetchGroupField refField : refFields) {
            if (refField.fetchMode == FetchMode.UNDEFINED) {
                if (refField.metaProperty.getRange().getCardinality().isMany()) {
                    List<String> masterAttributes = getMasterEntityAttributes(fetchGroupFields, refField,
                            useFetchGroup);
                    fetchGroupAttributes.addAll(masterAttributes);
                }
                continue;
            }

            boolean selfRef = false;
            for (MetaProperty mp : refField.metaPropertyPath.getMetaProperties()) {
                if (!mp.getRange().getCardinality().isMany()) {
                    MetaClass mpClass = mp.getRange().asClass();
                    if (metadataTools.isAssignableFrom(mpClass, metaClass)
                            || metadataTools.isAssignableFrom(metaClass, mpClass)) {
                        batchFields.add(refField);
                        selfRef = true;
                        break;
                    }
                }
            }

            if (!selfRef) {
                if (refField.metaProperty.getRange().getCardinality().isMany()) {
                    List<String> masterAttributes = getMasterEntityAttributes(fetchGroupFields, refField,
                            useFetchGroup);
                    fetchGroupAttributes.addAll(masterAttributes);

                    if (refField.fetchMode == FetchMode.JOIN) {
                        joinFields.add(refField);
                    } else {
                        batchFields.add(refField);
                    }
                } else {
                    if (refField.fetchMode == FetchMode.BATCH) {
                        batchFields.add(refField);
                    } else {
                        joinFields.add(refField);
                    }
                }
            }
        }

        for (FetchGroupField joinField : new ArrayList<>(joinFields)) {
            // adjust fetch mode according to parent attributes
            if (joinField.fetchMode == FetchMode.AUTO) {
                Optional<FetchMode> parentMode = refFields.stream()
                        .filter(f -> joinField.metaPropertyPath.startsWith(f.metaPropertyPath)
                                && joinField.fetchMode != FetchMode.JOIN)
                        .sorted((f1, f2) -> f1.metaPropertyPath.getPath().length
                                - f2.metaPropertyPath.getPath().length)
                        .findFirst().map(f -> f.fetchMode);
                if (parentMode.isPresent() && parentMode.get() == FetchMode.UNDEFINED) {
                    joinFields.remove(joinField);
                } else {
                    for (FetchGroupField batchField : new ArrayList<>(batchFields)) {
                        if (joinField.metaPropertyPath.startsWith(batchField.metaPropertyPath)) {
                            joinFields.remove(joinField);
                            batchFields.add(joinField);
                        }
                    }
                }
            }
        }

        QueryParser parser = QueryTransformerFactory.createParser(queryString);

        List<FetchGroupField> isNullFields = joinFields.stream()
                .filter(f -> f.fetchMode == FetchMode.AUTO && parser.hasIsNullCondition(f.path()))
                .collect(Collectors.toList());
        if (!isNullFields.isEmpty()) {
            for (Iterator<FetchGroupField> fieldIt = joinFields.iterator(); fieldIt.hasNext();) {
                FetchGroupField joinField = fieldIt.next();
                boolean isNullField = isNullFields.stream()
                        .anyMatch(f -> joinField == f || f.fetchMode == FetchMode.AUTO
                                && joinField.metaPropertyPath.startsWith(f.metaPropertyPath));
                if (isNullField) {
                    fieldIt.remove();
                    fetchGroupAttributes.removeIf(attr -> attr.startsWith(joinField.path() + "."));
                }
            }
        }

        long toManyCount = refFields.stream().filter(f -> f.metaProperty.getRange().getCardinality().isMany())
                .count();

        // For query by ID, remove BATCH mode for to-many attributes that have no nested attributes
        if (singleResultExpected && toManyCount <= 1) {
            for (FetchGroupField batchField : new ArrayList<>(batchFields)) {
                if (batchField.metaProperty.getRange().getCardinality().isMany()) {
                    boolean hasNested = refFields.stream().anyMatch(
                            f -> f != batchField && f.metaPropertyPath.startsWith(batchField.metaPropertyPath));
                    if (!hasNested && batchField.fetchMode != FetchMode.BATCH) {
                        batchFields.remove(batchField);
                    }
                }
            }
        }

        // Find many-to-many fields that form a self-referencing cycle, e.g. {E}.b.a.b where b is of type {E}.
        // If {E}.b, {E}.b.a and {E}.b.a.b are all fetched in BATCH mode, the same query is used simultaneously
        // while loading {E}.b and {E}.b.a.b, so the result of the batch query is incorrect.
        // Remove these fields from BATCH processing.
        for (FetchGroupField refField : refFields) {
            if (refField.fetchMode == FetchMode.AUTO
                    && refField.metaProperty.getRange().getCardinality() == Range.Cardinality.MANY_TO_MANY) {
                //find property {E}.a.b for {E}.a where b of type {E}
                List<FetchGroupField> selfRefs = refFields.stream()
                        .filter(f -> isTransitiveSelfReference(refField, f)).collect(Collectors.toList());
                for (FetchGroupField selfRef : selfRefs) {
                    List<FetchGroupField> secondLevelSelfRefs = refFields.stream()
                            .filter(f -> isTransitiveSelfReference(selfRef, f)).collect(Collectors.toList());
                    for (FetchGroupField f : secondLevelSelfRefs) {
                        batchFields.remove(f);
                        batchFields.remove(selfRef);
                        batchFields.remove(refField);
                    }
                }
            }
        }

        for (FetchGroupField joinField : joinFields) {
            String attr = alias + "." + joinField.path();
            fetchHints.put(attr, QueryHints.LEFT_FETCH);
        }

        for (FetchGroupField batchField : batchFields) {
            if (batchField.fetchMode == FetchMode.BATCH || !singleResultExpected || batchFields.size() > 1) {
                String attr = alias + "." + batchField.path();
                fetchHints.put(attr, QueryHints.BATCH);
                hasBatches = true;
            }
        }
    }

    if (log.isTraceEnabled())
        log.trace((useFetchGroup ? "Fetch" : "Load") + " group for " + view + ":\n"
                + fetchGroupAttributes.stream().collect(Collectors.joining("\n")));
    for (String attribute : fetchGroupAttributes) {
        attrGroup.addAttribute(attribute);
    }

    if (!metadataTools.isCacheable(metaClass)) {
        query.setHint(useFetchGroup ? QueryHints.FETCH_GROUP : QueryHints.LOAD_GROUP, attrGroup);
    }

    if (log.isDebugEnabled()) {
        String fetchModes = fetchHints.entrySet().stream()
                .map(e -> e.getKey() + "=" + (e.getValue().equals(QueryHints.LEFT_FETCH) ? "JOIN" : "BATCH"))
                .collect(Collectors.joining(", "));
        log.debug("Fetch modes for " + view + ": " + (fetchModes.equals("") ? "<none>" : fetchModes));
    }
    for (Map.Entry<String, String> entry : fetchHints.entrySet()) {
        query.setHint(entry.getValue(), entry.getKey());
    }

    if (hasBatches) {
        query.setHint(QueryHints.BATCH_TYPE, "IN");
    }
}
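
The debug-logging step above, reduced to a sketch with illustrative hint entries (the QueryHints constants are replaced by plain strings):

Map<String, String> fetchHints = new TreeMap<>();
fetchHints.put("e.customer", "LEFT_FETCH");
fetchHints.put("e.items", "BATCH");
String fetchModes = fetchHints.entrySet().stream()
        .map(e -> e.getKey() + "=" + e.getValue())
        .collect(Collectors.joining(", "));
// e.customer=LEFT_FETCH, e.items=BATCH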

From source file:org.eclipse.packagedrone.repo.channel.web.channel.ChannelController.java

private static String joinChannelNames(final Collection<String> names) {
    return names.stream().collect(Collectors.joining("\n"));
}
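
Since the stream does nothing but join, String.join is an equivalent one-liner (a behavior-preserving rewrite, not from the original project):

private static String joinChannelNames(final Collection<String> names) {
    return String.join("\n", names);
}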

From source file:com.evolveum.midpoint.notifications.impl.notifiers.SimpleWorkflowNotifier.java

private void appendAssigneeInformation(StringBuilder sb, WorkItemEvent event, OperationResult result) {
    WorkItemType workItem = event.getWorkItem();
    ObjectReferenceType originalAssignee = workItem.getOriginalAssigneeRef();
    List<ObjectReferenceType> currentAssignees = workItem.getAssigneeRef();
    boolean atLeastOne = false;
    if (currentAssignees.size() != 1
            || !java.util.Objects.equals(originalAssignee.getOid(), currentAssignees.get(0).getOid())) {
        UserType originalAssigneeObject = (UserType) functions.getObjectType(originalAssignee, true, result);
        sb.append("Originally allocated to: ")
                .append(textFormatter.formatUserName(originalAssigneeObject, originalAssignee.getOid()))
                .append("\n");
        atLeastOne = true;
    }
    if (!workItem.getAssigneeRef().isEmpty()) {
        sb.append("Allocated to");
        if (event.getOperationKind() == WorkItemOperationKindType.DELEGATE) {
            sb.append(event.isAdd() ? " (after delegation)" : " (before delegation)");
        } else if (event.getOperationKind() == WorkItemOperationKindType.ESCALATE) {
            sb.append(event.isAdd() ? " (after escalation)" : " (before escalation)");
        }
        sb.append(": ");
        sb.append(workItem.getAssigneeRef().stream().map(ref -> textFormatter.formatUserName(ref, result))
                .collect(Collectors.joining(", ")));
        sb.append("\n");
        atLeastOne = true;
    }
    if (atLeastOne) {
        sb.append("\n");
    }
}

From source file:ijfx.service.uicontext.UiContextService.java

public void importContextConfiguration(String json) {
    ObjectMapper mapper = new ObjectMapper();

    List<UiContext> contextList = null;

    try {
        contextList = mapper.readValue(json,
                TypeFactory.defaultInstance().constructCollectionType(List.class, UiContext.class));

    } catch (IOException ex) {
        logger.log(Level.SEVERE, "Error when converting the json file into context configuration.", ex);
        return; // without a parsed context list there is nothing to iterate below
    }

    contextList.forEach(context -> {
        logger.info(String.format("Loaded context %s which is incompatible with : %s", context.getId(),
                context.getIncompatibles().stream().collect(Collectors.joining(", "))));
        uiContextMap.put(context.getId(), context);
    });

}

From source file:com.ethercamp.harmony.jsonrpc.EthJsonRpcImpl.java

public String web3_clientVersion() {
    Pattern shortVersion = Pattern.compile("(\\d\\.\\d).*");
    Matcher matcher = shortVersion.matcher(System.getProperty("java.version"));
    matcher.matches(); // run the match so group(1) below is available

    return Arrays
            .asList("Harmony", "v" + config.projectVersion(), System.getProperty("os.name"),
                    "Java" + matcher.group(1), config.projectVersionModifier() + "-" + BuildInfo.buildHash)
            .stream().collect(Collectors.joining("/"));
}
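
Stream.of avoids the Arrays.asList(...).stream() detour; a sketch with placeholder version values:

String clientVersion = Stream.of("Harmony", "v1.0.0", "Linux", "Java1.8", "master-abc123")
        .collect(Collectors.joining("/"));
// Harmony/v1.0.0/Linux/Java1.8/master-abc123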

From source file:org.hawkular.agent.monitor.util.Util.java

/**
 * Tries to determine the system ID for the machine where this JVM is located.
 *
 * @return system ID or null if cannot determine
 */
public static String getSystemId() {
    if (systemId == null) {
        File machineIdFile = new File("/etc/machine-id");
        if (machineIdFile.exists() && machineIdFile.canRead()) {
            try (Reader reader = new InputStreamReader(new FileInputStream(machineIdFile))) {
                systemId = new BufferedReader(reader).lines().collect(Collectors.joining("\n"));
            } catch (IOException e) {
                log.errorf(e,
                        "/etc/machine-id exists and is readable, but exception was raised when reading it");
                systemId = "";
            }
        } else {
            log.errorf("/etc/machine-id does not exist or is unreadable");
            // for the future, we might want to check additional places and try different things
            systemId = "";
        }
    }

    return (systemId.isEmpty()) ? null : systemId;
}
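
Reading a whole text file through BufferedReader.lines() and rejoining with "\n" is a common Java 8 idiom; a minimal sketch (the path and error handling are illustrative):

try (BufferedReader reader = Files.newBufferedReader(Paths.get("/etc/machine-id"))) {
    String content = reader.lines().collect(Collectors.joining("\n"));
} catch (IOException e) {
    // the file may be absent on non-systemd machines
}

On Java 11 and later, Files.readString(path) reads the file in one call, though it keeps the trailing newline that the lines-and-join approach strips.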

From source file:eu.freme.common.rest.OwnedResourceManagingController.java

@RequestMapping(method = RequestMethod.GET)
@Secured({ "ROLE_USER", "ROLE_ADMIN" })
public ResponseEntity<String> getAllEntities() {
    try {
        List<Entity> entities = entityDAO.findAllReadAccessible();
        String serialization = entities.stream().map(p -> {
            try {
                return p.toJson();
            } catch (JsonProcessingException e) {
                throw new FREMEHttpException("Could not serialize entity with identifier=\"" + p.getIdentifier()
                        + "\" to JSON. " + e.getMessage());
            }
        }).collect(Collectors.joining(",\n"));

        HttpHeaders responseHeaders = new HttpHeaders();
        //ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
        //String serialization = ow.writeValueAsString(entities);
        responseHeaders.add("Content-Type", SerializationFormatMapper.JSON);
        return new ResponseEntity<>("[" + serialization + "]", responseHeaders, HttpStatus.OK);
    } catch (FREMEHttpException ex) {
        logger.error(ex.getMessage());
        throw ex;
    } catch (Exception ex) {
        logger.error(ex.getMessage());
        throw new FREMEHttpException(ex.getMessage());
    }
}
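
The three-argument overload Collectors.joining(delimiter, prefix, suffix) can add the surrounding brackets directly instead of concatenating them afterwards; a sketch with sample serializations:

String serialization = Stream.of("{\"id\":1}", "{\"id\":2}")
        .collect(Collectors.joining(",\n", "[", "]"));
// [{"id":1},
// {"id":2}]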

From source file:com.intuit.wasabi.tests.service.priority.BasicPriorityTest.java

private void assertPriority(Experiment assertExperiment) {
    String exclusion = "{\"experimentIDs\": ["
            + validExperimentsLists.stream().map(s -> "\"" + s.id + "\"").collect(Collectors.joining(","))
            + "]}";
    exclusion = exclusion.replace(validExperimentsLists.get(0).id, assertExperiment.id);
    response = apiServerConnector
            .doPut("applications/" + validExperimentsLists.get(0).applicationName + "/priorities", exclusion);
    assertReturnCode(response, HttpStatus.SC_NO_CONTENT);
    response = apiServerConnector
            .doGet("applications/" + validExperimentsLists.get(0).applicationName + "/priorities");
    LOGGER.debug("output: " + response.asString());
    assertReturnCode(response, HttpStatus.SC_OK);
    Type listType = new TypeToken<Map<String, ArrayList<Map<String, Object>>>>() {
    }.getType();
    Map<String, List<Map<String, Object>>> resultMap = new Gson().fromJson(response.asString(), listType);
    List<Map<String, Object>> prioritizedExperiments = resultMap.get("prioritizedExperiments");
    Assert.assertEquals(prioritizedExperiments.size(), validExperimentsLists.size());
}