List of usage examples for java.util.Arrays.stream
public static DoubleStream stream(double[] array)
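All of the examples below come from real projects, and most of them use the generic overload Arrays.stream(T[] array), which returns a Stream&lt;T&gt;, rather than the double[] overload shown above. For orientation, here is a minimal, self-contained sketch (not taken from any of the source files below) showing both overloads; the class and variable names are illustrative only.

import java.util.Arrays;
import java.util.stream.DoubleStream;

public class ArraysStreamSketch {
    public static void main(String[] args) {
        // DoubleStream stream(double[] array): a sequential stream over the whole array
        double[] values = {1.5, 2.5, 3.0};
        DoubleStream doubles = Arrays.stream(values);
        System.out.println(doubles.sum()); // 7.0

        // The generic overload <T> Stream<T> stream(T[] array) is what most examples on this page use
        String joined = Arrays.stream(new String[] {"a", "b", "c"})
                .filter(s -> !s.isEmpty())
                .reduce("", String::concat);
        System.out.println(joined); // abc
    }
}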
From source file:alfio.model.system.Configuration.java
private static ConfigurationPathKey from(Optional<Integer> organizationId, Optional<Integer> eventId,
        Optional<Integer> ticketCategoryId, ConfigurationKeys key) {
    boolean organizationAvailable = organizationId.isPresent();
    boolean eventAvailable = eventId.isPresent();
    boolean categoryAvailable = ticketCategoryId.isPresent();
    // pick the most specific configuration level supported by the ids that are actually present,
    // falling back to SYSTEM when none of them apply
    ConfigurationPathLevel mostSensible = Arrays.stream(ConfigurationPathLevel.values())
            .sorted(Comparator.<ConfigurationPathLevel>naturalOrder().reversed())
            .filter(path -> path == ConfigurationPathLevel.ORGANIZATION && organizationAvailable
                    || path == ConfigurationPathLevel.EVENT && organizationAvailable && eventAvailable
                    || path == ConfigurationPathLevel.TICKET_CATEGORY && organizationAvailable && eventAvailable
                            && categoryAvailable)
            .findFirst()
            .orElse(ConfigurationPathLevel.SYSTEM);
    switch (mostSensible) {
    case ORGANIZATION:
        return getOrganizationConfiguration(organizationId.get(), key);
    case EVENT:
        return getEventConfiguration(organizationId.get(), eventId.get(), key);
    case TICKET_CATEGORY:
        return getTicketCategoryConfiguration(organizationId.get(), eventId.get(), ticketCategoryId.get(), key);
    }
    return getSystemConfiguration(key);
}
From source file:com.uber.hoodie.common.table.view.HoodieTableFileSystemView.java
private Stream<HoodieLogFile> convertFileStatusesToLogFiles(FileStatus[] statuses) {
    // keep only statuses whose file name carries the real-time (log) file extension
    Predicate<FileStatus> rtFilePredicate = fileStatus -> fileStatus.getPath().getName()
            .contains(metaClient.getTableConfig().getRTFileFormat().getFileExtension());
    return Arrays.stream(statuses).filter(rtFilePredicate).map(HoodieLogFile::new);
}
From source file:com.netflix.spinnaker.clouddriver.kubernetes.v2.op.job.KubectlJobExecutor.java
public List<String> delete(KubernetesV2Credentials credentials, KubernetesKind kind, String namespace,
        String name, KubernetesSelectorList labelSelectors, V1DeleteOptions deleteOptions) {
    List<String> command = kubectlNamespacedAuthPrefix(credentials, namespace);

    command.add("delete");

    command = kubectlLookupInfo(command, kind, name, labelSelectors);

    // spinnaker generally accepts deletes of resources that don't exist
    command.add("--ignore-not-found=true");

    if (deleteOptions.isOrphanDependents() != null) {
        command.add("--cascade=" + !deleteOptions.isOrphanDependents());
    }

    if (deleteOptions.getGracePeriodSeconds() != null) {
        command.add("--grace-period=" + deleteOptions.getGracePeriodSeconds());
    }

    if (StringUtils.isNotEmpty(deleteOptions.getPropagationPolicy())) {
        throw new IllegalArgumentException("Propagation policy is not yet supported as a delete option");
    }

    String jobId = jobExecutor.startJob(new JobRequest(command), System.getenv(),
            new ByteArrayInputStream(new byte[0]));

    JobStatus status = backoffWait(jobId, credentials.isDebug());

    if (status.getResult() != JobStatus.Result.SUCCESS) {
        throw new KubectlException(
                "Failed to delete " + kind + "/" + name + " from " + namespace + ": " + status.getStdErr());
    }

    if (StringUtils.isEmpty(status.getStdOut())) {
        return new ArrayList<>();
    }

    return Arrays.stream(status.getStdOut().split("\n"))
            .map(m -> m.substring(m.indexOf("\"") + 1))
            .map(m -> m.substring(0, m.lastIndexOf("\"")))
            .collect(Collectors.toList());
}
From source file:com.simiacryptus.mindseye.lang.Layer.java
/**
 * Eval and free nn result.
 *
 * @param array the array
 * @return the nn result
 */
@Nullable
default Result evalAndFree(Result... array) {
    Result result = eval(array);
    Arrays.stream(array).map(Result::getData).forEach(ReferenceCounting::freeRef);
    Arrays.stream(array).forEach(ReferenceCounting::freeRef);
    return result;
}
From source file:com.netflix.spinnaker.halyard.backup.services.v1.BackupService.java
private void addFileToTar(TarArchiveOutputStream tarArchiveOutputStream, String path, String base) {
    File file = new File(path);
    String fileName = file.getName();

    if (Arrays.stream(omitPaths).anyMatch(s -> s.equals(fileName))) {
        return;
    }

    String tarEntryName = String.join("/", base, fileName);

    try {
        if (file.isFile()) {
            TarArchiveEntry tarEntry = new TarArchiveEntry(file, tarEntryName);
            tarArchiveOutputStream.putArchiveEntry(tarEntry);
            IOUtils.copy(new FileInputStream(file), tarArchiveOutputStream);
            tarArchiveOutputStream.closeArchiveEntry();
        } else if (file.isDirectory()) {
            Arrays.stream(file.listFiles()).filter(Objects::nonNull)
                    .forEach(f -> addFileToTar(tarArchiveOutputStream, f.getAbsolutePath(), tarEntryName));
        } else {
            log.warn("Unknown file type: " + file + " - skipping addition to tar archive");
        }
    } catch (IOException e) {
        throw new HalException(Problem.Severity.FATAL,
                "Unable to file " + file.getName() + " to archive entry: " + tarEntryName + " " + e.getMessage(), e);
    }
}
From source file:com.wso2telco.core.userprofile.permission.impl.WSO2PermissionBuilder.java
/**
 * Recursively build the permission tree and return it as a tree of maps.
 *
 * @param rootPermissionTree
 * @return
 */
private RetunEntitiy popUserRolePermissions(UIPermissionNode[] rootPermissionTree) {
    RetunEntitiy entity = new RetunEntitiy();
    Arrays.stream(rootPermissionTree).forEach(item -> {
        UIPermissionNode[] uiPermissionArray = item.getNodeList();
        if (uiPermissionArray != null && uiPermissionArray.length > 0) {
            // the node has child elements: recurse and merge the subtree
            RetunEntitiy temp = popUserRolePermissions(uiPermissionArray);
            entity.mergeMapEntry(item.getDisplayName(), temp);
        } else {
            // the node has no children
            entity.mergeMapEntry(item);
        }
    });
    return entity;
}
From source file:com.hurence.logisland.service.solr.Solr_6_4_2_ChronixClientService.java
protected void createChronixStorage(ControllerServiceInitializationContext context) throws ProcessException {
    if (updater != null) {
        return;
    }

    // setup a thread pool of solr updaters
    int batchSize = context.getPropertyValue(BATCH_SIZE).asInteger();
    long flushInterval = context.getPropertyValue(FLUSH_INTERVAL).asLong();
    String[] groupBy = context.getPropertyValue(GROUP_BY).asString().split(",");
    updater = new ChronixUpdater(solr, queue,
            Arrays.stream(groupBy).filter(StringUtils::isNotBlank).collect(Collectors.toList()),
            batchSize, flushInterval);
    executorService.execute(updater);
}
From source file:org.mascherl.example.page.SignUpPage.java
private void addValidationErrors(Model model, String... fields) {
    Arrays.stream(fields).filter(this::hasValidationError).forEach(field -> model.put(field + "Error", true));
}
From source file:org.shredzone.cilla.view.FeedView.java
/**
 * Converts the feed's suffix to a ROME type.
 *
 * @param suffix
 *            the feed's suffix
 * @return ROME type
 * @throws PageNotFoundException
 *             if the feed suffix is unknown
 */
private String convertFeedType(String suffix) throws PageNotFoundException {
    return Arrays.stream(FeedType.values())
            .filter(type -> type.getSuffix().equals(suffix))
            .map(FeedType::getType)
            .findFirst()
            .orElseThrow(() -> new PageNotFoundException("Unknown feed type " + suffix));
}
From source file:com.blackducksoftware.integration.hub.detect.configuration.DetectConfiguration.java
public Set<String> getBlackduckPropertyKeys() {
    final Set<String> providedKeys = detectPropertySource.getBlackduckPropertyKeys();
    final Set<String> allKeys = new HashSet<>(providedKeys);
    Arrays.stream(DetectProperty.values()).forEach(currentProperty -> {
        final String propertyKey = currentProperty.getPropertyKey();
        if (propertyKey.startsWith(BlackDuckServerConfigBuilder.BLACKDUCK_SERVER_CONFIG_ENVIRONMENT_VARIABLE_PREFIX)
                || propertyKey.startsWith(BlackDuckServerConfigBuilder.BLACKDUCK_SERVER_CONFIG_PROPERTY_KEY_PREFIX)) {
            allKeys.add(propertyKey);
        } else if (propertyKey.startsWith(DetectPropertySource.BLACKDUCK_PROPERTY_PREFIX)
                || propertyKey.startsWith(DetectPropertySource.BLACKDUCK_ENVIRONMENT_PREFIX)) {
            allKeys.add(propertyKey);
        }
    });
    return allKeys;
}