List of usage examples for java.util.stream.Collectors.joining
public static Collector<CharSequence, ?, String> joining(CharSequence delimiter)
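Before the real-world examples below, here is a minimal, self-contained sketch of this overload. The list contents are made up purely for illustration.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class JoiningExample {
        public static void main(String[] args) {
            List<String> names = Arrays.asList("alpha", "beta", "gamma"); // illustrative values only
            // Concatenates the elements in encounter order, inserting ", " between them.
            String joined = names.stream().collect(Collectors.joining(", "));
            System.out.println(joined); // prints: alpha, beta, gamma
        }
    }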
From source file: com.creactiviti.piper.core.task.SpelTaskEvaluator.java

    private <T> MethodExecutor join() {
        return (ctx, target, args) -> {
            String separator = (String) args[0];
            List<T> values = (List<T>) args[1];
            String str = values.stream()
                    .map(String::valueOf)
                    .collect(Collectors.joining(separator));
            return new TypedValue(str);
        };
    }
From source file: com.kantenkugel.discordbot.moduleutils.DocParser.java

    private static void parse(String name, InputStream inputStream) {
        String[] nameSplits = name.split("[/\\.]");
        String className = nameSplits[nameSplits.length - 2];
        docs.putIfAbsent(className.toLowerCase(), new ArrayList<>());
        List<Documentation> docs = DocParser.docs.get(className.toLowerCase());
        try (BufferedReader buffer = new BufferedReader(new InputStreamReader(inputStream))) {
            String content = buffer.lines().collect(Collectors.joining("\n"));
            Matcher matcher = DOCS_PATTERN.matcher(content);
            while (matcher.find()) {
                String method = matcher.group(2).trim();
                if (method.contains("class ") || method.contains("interface ")) {
                    continue;
                }
                if (method.endsWith("{"))
                    method = method.substring(0, method.length() - 1).trim();
                Matcher m2 = METHOD_PATTERN.matcher(method);
                if (!m2.find())
                    continue;
                String methodName = m2.group(1);
                List<String> argTypes = new ArrayList<>();
                m2 = METHOD_ARG_PATTERN.matcher(m2.group(2));
                while (m2.find())
                    argTypes.add(m2.group(1));
                List<String> docText = cleanupDocs(matcher.group(1));
                String returns = null;
                Map<String, String> args = new HashMap<>();
                Map<String, String> throwing = new HashMap<>();
                String desc = null;
                for (String line : docText) {
                    if (!line.isEmpty() && line.charAt(0) == '@') {
                        if (line.startsWith("@return "))
                            returns = line.substring(8);
                        else if (line.startsWith("@param ")) {
                            String[] split = line.split("\\s+", 3);
                            args.put(split[1], split.length == 3 ? split[2] : "*No Description*");
                        } else if (line.startsWith("@throws ")) {
                            String[] split = line.split("\\s+", 3);
                            throwing.put(split[1], split.length == 3 ? split[2] : "*No Description*");
                        }
                    } else {
                        desc = desc == null ? line : desc + '\n' + line;
                    }
                }
                docs.add(new Documentation(methodName, argTypes, method, desc, returns, args, throwing));
            }
        } catch (IOException ignored) {
        }
        try {
            inputStream.close();
        } catch (IOException e) {
            LOG.log(e);
        }
    }
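The example above uses buffer.lines().collect(Collectors.joining("\n")) to read an entire InputStream into one String. A stripped-down sketch of just that idiom; the helper name readAll and the sample input are hypothetical.

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.stream.Collectors;

    public class ReadAllExample {
        // Hypothetical helper: rejoins every line of the stream with "\n".
        static String readAll(InputStream in) throws IOException {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                return reader.lines().collect(Collectors.joining("\n"));
            }
        }

        public static void main(String[] args) throws IOException {
            InputStream in = new ByteArrayInputStream("line one\nline two".getBytes(StandardCharsets.UTF_8));
            System.out.println(readAll(in)); // prints both lines, rejoined with \n
        }
    }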
From source file: com.ikanow.aleph2.storage_service_hdfs.services.TestMockHdfsStorageSystem.java

    @Test
    public void test_validate() {
        final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;
        GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class)
                .with(GlobalPropertiesBean::distributed_root_dir, temp_dir)
                .with(GlobalPropertiesBean::local_yarn_config_dir, System.getenv("HADOOP_CONF_DIR")).done().get();
        MockHdfsStorageService storageService = new MockHdfsStorageService(globals);
        // Works
        {
            final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                    .with(DataBucketBean::full_name, "/test/validate/bucket").done().get();
            Tuple2<String, List<BasicMessageBean>> res = storageService.validateSchema(null, bucket);
            assertEquals(
                    "Validation: " + res._2().stream().map(BasicMessageBean::message).collect(Collectors.joining("\n")),
                    0, res._2().size());
            assertEquals((temp_dir.replace(File.separator, "/") + "/data/test/validate/bucket/managed_bucket/")
                    .replaceAll("//", "/"), res._1().replace(File.separator, "/").replaceAll("//", "/"));
        }
        // Works some more
        Arrays.asList("gz", "gzip", "sz", "snappy", "fr.sz", "snappy_framed").stream()
                .map(s -> buildBucketWithCodec(s)).forEach(bucket -> {
                    Tuple2<String, List<BasicMessageBean>> res = storageService
                            .validateSchema(bucket.data_schema().storage_schema(), bucket);
                    assertEquals(
                            "Validation: " + res._2().stream().map(BasicMessageBean::message)
                                    .collect(Collectors.joining("\n")),
                            0, res._2().size());
                    assertEquals(
                            (temp_dir.replace(File.separator, "/") + "/data/" + bucket.full_name()
                                    + IStorageService.BUCKET_SUFFIX).replaceAll("//", "/"),
                            res._1().replace(File.separator, "/").replaceAll("//", "/"));
                });
        // Fails
        Arrays.asList("banana").stream().map(s -> buildBucketWithCodec(s)).forEach(bucket -> {
            Tuple2<String, List<BasicMessageBean>> res = storageService
                    .validateSchema(bucket.data_schema().storage_schema(), bucket);
            assertEquals(
                    "Validation: " + res._2().stream().map(BasicMessageBean::message).collect(Collectors.joining("\n")),
                    1, res._2().size());
            assertEquals("", res._1());
        });
    }
From source file: com.diversityarrays.kdxplore.trials.SampleGroupExportDialog.java

    static public Set<Integer> getExcludedTraitIds(Component comp, String msgTitle, KdxploreDatabase kdxdb,
            SampleGroup sg) {
        Set<Integer> excludeTheseTraitIds = new HashSet<>();
        try {
            Set<Integer> traitIds = collectTraitIdsFromSamples(kdxdb, sg);
            List<Trait> undecidableTraits = new ArrayList<>();
            Set<Integer> missingTraitIds = new TreeSet<>();
            Map<Integer, Trait> traitMap = kdxdb.getKDXploreKSmartDatabase().getTraitMap();
            for (Integer traitId : traitIds) {
                Trait t = traitMap.get(traitId);
                if (t == null) {
                    missingTraitIds.add(traitId);
                } else if (TraitLevel.UNDECIDABLE == t.getTraitLevel()) {
                    undecidableTraits.add(t);
                }
            }
            if (!missingTraitIds.isEmpty()) {
                String msg = missingTraitIds.stream().map(i -> Integer.toString(i))
                        .collect(Collectors.joining(","));
                MsgBox.error(comp, msg, "Missing Trait IDs");
                return null;
            }
            if (!undecidableTraits.isEmpty()) {
                String msg = undecidableTraits.stream().map(Trait::getTraitName)
                        .collect(Collectors.joining("\n",
                                "Traits that are neither Plot nor Sub-Plot:\n",
                                "\nDo you want to continue and Exclude samples for those Traits?"));
                if (JOptionPane.YES_OPTION != JOptionPane.showConfirmDialog(comp, msg, msgTitle,
                        JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE)) {
                    return null;
                }
                Set<Integer> tmp = undecidableTraits.stream().map(Trait::getTraitId).collect(Collectors.toSet());
                excludeTheseTraitIds.addAll(tmp);
            }
        } catch (IOException e) {
            MsgBox.error(comp, "Unable to read samples from database\n" + e.getMessage(), msgTitle);
            return null;
        }
        return excludeTheseTraitIds;
    }
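The confirmation message in the example above is built with the three-argument overload joining(delimiter, prefix, suffix), which wraps the joined elements in a fixed prefix and suffix. A minimal sketch with made-up trait names:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class JoiningPrefixSuffixExample {
        public static void main(String[] args) {
            List<String> traitNames = Arrays.asList("Height", "Yield"); // illustrative values only
            // Elements joined with "\n", then surrounded by the prefix and suffix strings.
            String msg = traitNames.stream()
                    .collect(Collectors.joining("\n",
                            "Traits that are neither Plot nor Sub-Plot:\n",
                            "\nDo you want to continue?"));
            System.out.println(msg);
        }
    }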
From source file: com.netflix.subtitles.ttml.TtmlUtilsTest.java

    @Test
    public void moveAllStyleReferencesFromBodyAndDivToP() throws Exception {
        /* PREPARATION */
        TtEltype tt = new TtEltype();
        tt.setHead(new HeadEltype());
        tt.setBody(new BodyEltype());

        HeadEltype head = tt.getHead();
        head.setStyling(new StylingEltype());
        StylingEltype styling = head.getStyling();
        styling.getStyle().add(createStyle("style0"));
        styling.getStyle().add(createStyle("style1"));
        styling.getStyle().add(createStyle("style2"));
        styling.getStyle().add(createStyle("style3"));
        styling.getStyle().add(createStyle("style4"));

        BodyEltype body = tt.getBody();
        body.getStyle().add(styling.getStyle().get(0).getId());
        body.getStyle().add(styling.getStyle().get(1).getId());
        body.getDiv().add(new DivEltype());

        DivEltype div = body.getDiv().get(0);
        div.getStyle().add(styling.getStyle().get(2).getId());
        div.getBlockClass().add(new PBuilder().withStyle(styling.getStyle().get(3).getId()).build());
        div.getBlockClass().add(new PBuilder().withStyle(styling.getStyle().get(4).getId()).build());

        /* EXECUTION */
        TtmlUtils.moveStyleRefToP(tt);

        /* VALIDATION */
        Field bodyField = body.getClass().getDeclaredField(STYLE_FIELD);
        bodyField.setAccessible(true);
        Field divField = body.getClass().getDeclaredField(STYLE_FIELD);
        divField.setAccessible(true);

        assertEquals(null, bodyField.get(body));
        assertEquals(null, divField.get(body));
        assertEquals("style0 style1 style2 style3", p(div.getBlockClass().get(0)).getStyle().stream()
                .map(Object::toString).collect(Collectors.joining(" ")));
        assertEquals("style0 style1 style2 style4", p(div.getBlockClass().get(1)).getStyle().stream()
                .map(Object::toString).collect(Collectors.joining(" ")));
    }
From source file: com.uber.hoodie.utilities.sources.DFSSource.java

    @Override
    public Pair<Optional<JavaRDD<GenericRecord>>, String> fetchNewData(Optional<String> lastCheckpointStr,
            long maxInputBytes) {
        try {
            // obtain all eligible files under root folder.
            List<FileStatus> eligibleFiles = new ArrayList<>();
            RemoteIterator<LocatedFileStatus> fitr = fs
                    .listFiles(new Path(config.getString(Config.ROOT_INPUT_PATH_PROP)), true);
            while (fitr.hasNext()) {
                LocatedFileStatus fileStatus = fitr.next();
                if (fileStatus.isDirectory() || IGNORE_FILEPREFIX_LIST.stream()
                        .filter(pfx -> fileStatus.getPath().getName().startsWith(pfx)).count() > 0) {
                    continue;
                }
                eligibleFiles.add(fileStatus);
            }
            // sort them by modification time.
            eligibleFiles.sort((FileStatus f1, FileStatus f2) -> Long.valueOf(f1.getModificationTime())
                    .compareTo(Long.valueOf(f2.getModificationTime())));

            // Filter based on checkpoint & input size, if needed
            long currentBytes = 0;
            long maxModificationTime = Long.MIN_VALUE;
            List<FileStatus> filteredFiles = new ArrayList<>();
            for (FileStatus f : eligibleFiles) {
                if (lastCheckpointStr.isPresent()
                        && f.getModificationTime() <= Long.valueOf(lastCheckpointStr.get())) {
                    // skip processed files
                    continue;
                }
                maxModificationTime = f.getModificationTime();
                currentBytes += f.getLen();
                filteredFiles.add(f);
                if (currentBytes >= maxInputBytes) {
                    // we have enough data, we are done
                    break;
                }
            }

            // no data to read
            if (filteredFiles.size() == 0) {
                return new ImmutablePair<>(Optional.empty(),
                        lastCheckpointStr.isPresent() ? lastCheckpointStr.get() : String.valueOf(Long.MIN_VALUE));
            }

            // read the files out.
            String pathStr = filteredFiles.stream().map(f -> f.getPath().toString())
                    .collect(Collectors.joining(","));
            String schemaStr = schemaProvider.getSourceSchema().toString();
            final AvroConvertor avroConvertor = new AvroConvertor(schemaStr);
            return new ImmutablePair<>(
                    Optional.of(DFSSource.fromFiles(dataFormat, avroConvertor, pathStr, sparkContext)),
                    String.valueOf(maxModificationTime));
        } catch (IOException ioe) {
            throw new HoodieIOException("Unable to read from source from checkpoint: " + lastCheckpointStr, ioe);
        }
    }
From source file: de.blizzy.rust.lootconfig.LootConfigDump.java

    private String formatBlueprints(Category category) {
        return category.Blueprints.stream()
                .sorted((blueprintSpawn1, blueprintSpawn2) -> Collator.getInstance()
                        .compare(blueprintSpawn1.item.Shortname, blueprintSpawn2.item.Shortname))
                .peek(blueprintSpawn -> {
                    if (blueprintSpawn.item == null) {
                        throw new IllegalStateException("blueprint spawn without item");
                    }
                })
                .map(blueprintSpawn -> blueprintSpawn.item.Shortname)
                .collect(Collectors.joining(", "));
    }
From source file: com.github.mrenou.jacksonatic.internal.AnnotatedClassLogger.java

    private static String annotationsItToStr(Iterable<Annotation> annotations) {
        return stream(annotations)
                .filter(annotation -> JacksonaticAnnotation.class.isInstance(annotation))
                .map(Annotation::toString)
                .collect(Collectors.joining(","));
    }
From source file: com.ethercamp.harmony.web.filter.JsonRpcUsageFilter.java

    private void notifyInvocation(JsonNode requestJson, JsonNode responseJson) throws IOException {
        if (responseJson.has("error")) {
            final String errorMessage = responseJson.get("error").toString();
            log.warn("Problem when invoking JSON-RPC " + requestJson.toString() + " response:" + errorMessage);
        } else {
            final String methodName = requestJson.get("method").asText();
            final List<JsonNode> params = new ArrayList<>();
            if (requestJson.has("params")) {
                requestJson.get("params").forEach(n -> params.add(n));
            }
            final String responseText = mapper.writeValueAsString(responseJson);
            jsonRpcUsageService.methodInvoked(methodName, responseText);

            if (log.isInfoEnabled()) {
                // passwords could be sent here
                if (!EXCLUDE_LOGS.contains(methodName)) {
                    log.info(methodName + "("
                            + params.stream().map(n -> n.asText()).collect(Collectors.joining(", "))
                            + "): " + responseText);
                } else {
                    // logging is handled manually in service
                }
            }
        }
    }
From source file: com.evolveum.midpoint.notifications.impl.notifiers.SimpleCaseManagementNotifier.java

    private void appendAssigneeInformation(StringBuilder sb, CaseWorkItemEvent event, OperationResult result) {
        CaseWorkItemType workItem = event.getWorkItem();
        ObjectReferenceType originalAssignee = workItem.getOriginalAssigneeRef();
        List<ObjectReferenceType> currentAssignees = workItem.getAssigneeRef();
        boolean atLeastOne = false;
        if (currentAssignees.size() != 1
                || !java.util.Objects.equals(originalAssignee.getOid(), currentAssignees.get(0).getOid())) {
            UserType originalAssigneeObject = (UserType) functions.getObjectType(originalAssignee, true, result);
            sb.append("Originally allocated to: ")
                    .append(textFormatter.formatUserName(originalAssigneeObject, originalAssignee.getOid()))
                    .append("\n");
            atLeastOne = true;
        }
        if (!workItem.getAssigneeRef().isEmpty()) {
            sb.append("Allocated to: ");
            sb.append(workItem.getAssigneeRef().stream().map(ref -> textFormatter.formatUserName(ref, result))
                    .collect(Collectors.joining(", ")));
            sb.append("\n");
            atLeastOne = true;
        }
        if (atLeastOne) {
            sb.append("\n");
        }
    }