Example usage for java.util.Arrays.stream

A list of usage examples for java.util.Arrays.stream

Introduction

This page collects real-world usage examples for the java.util.Arrays.stream method.

Prototype

public static DoubleStream stream(double[] array) 

Document

Returns a sequential DoubleStream with the specified array as its source.
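
Note that Arrays.stream is overloaded for int[], long[], double[], and generic object arrays; the prototype above is the double[] overload, while most of the examples below use the generic Arrays.stream(T[]) form. A minimal, self-contained sketch of the double[] overload (class and variable names are illustrative, not taken from any of the sources below):

import java.util.Arrays;

public class DoubleStreamExample {
    public static void main(String[] args) {
        double[] values = {1.0, 2.5, 4.0};
        // Sequential DoubleStream with the array as its source
        double sum = Arrays.stream(values).sum();                 // 7.5
        double max = Arrays.stream(values).max().getAsDouble();   // 4.0
        System.out.println("sum=" + sum + " max=" + max);
    }
}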

Usage

From source file:com.avast.server.hdfsshell.ui.ShellPromptProvider.java
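This example calls Arrays.stream twice: first to trim the configured superuser group names and collect them into a Set, then to check with anyMatch whether the current user belongs to any admin group.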

private boolean isRootPrompt() {
    final String whoami = this.getWhoami();
    final String[] groupsForUser = contextCommands.getGroupsForUser(whoami);
    if (groupsForUser.length == 0) { //make guess
        return "root".equals(whoami) || "hdfs".equals(whoami);
    }
    final String[] groups = contextCommands.getConfiguration()
            .get("dfs.permissions.superusergroup", "supergroup").split(",");
    final Set<String> adminGroups = Arrays.stream(groups).map(String::trim).collect(Collectors.toSet());
    adminGroups.add("Administrators");//for Windows
    adminGroups.add("hdfs");//special cases
    adminGroups.add("root");
    return Arrays.stream(groupsForUser).anyMatch(adminGroups::contains);
}

From source file:com.haulmont.cuba.web.sys.singleapp.SingleAppWebServletListener.java
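Here Arrays.stream maps an array of dependency file names to a URL[] via map(...).toArray(URL[]::new), which is then used to build a class loader for a single-WAR deployment.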

@Override
public void contextInitialized(ServletContextEvent sce) {
    try {
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        //need to put the following class into WebAppClassLoader, to share it between web and core
        contextClassLoader.loadClass("com.haulmont.cuba.core.sys.remoting.LocalServiceDirectory");

        ServletContext servletContext = sce.getServletContext();
        String dependenciesFile;
        try {
            dependenciesFile = IOUtils.toString(servletContext.getResourceAsStream("/WEB-INF/web.dependencies"),
                    "UTF-8");
        } catch (IOException e) {
            throw new RuntimeException("An error occurred while loading dependencies file", e);
        }

        String[] dependenciesNames = dependenciesFile.split("\\n");
        URL[] urls = Arrays.stream(dependenciesNames).map((String name) -> {
            try {
                return servletContext.getResource("/WEB-INF/lib/" + name);
            } catch (MalformedURLException e) {
                throw new RuntimeException("An error occurred while loading dependency " + name, e);
            }
        }).toArray(URL[]::new);
        URLClassLoader webClassLoader = new CubaSingleAppClassLoader(urls, contextClassLoader);

        Thread.currentThread().setContextClassLoader(webClassLoader);
        Class<?> appContextLoaderClass = webClassLoader.loadClass(getAppContextLoaderClassName());
        appContextLoader = appContextLoaderClass.newInstance();

        Method setJarsNamesMethod = ReflectionUtils.findMethod(appContextLoaderClass, "setJarNames",
                String.class);
        ReflectionUtils.invokeMethod(setJarsNamesMethod, appContextLoader, dependenciesFile);

        Method contextInitializedMethod = ReflectionUtils.findMethod(appContextLoaderClass,
                "contextInitialized", ServletContextEvent.class);
        ReflectionUtils.invokeMethod(contextInitializedMethod, appContextLoader, sce);

        Thread.currentThread().setContextClassLoader(contextClassLoader);
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
        throw new RuntimeException("An error occurred while starting single WAR application", e);
    }
}

From source file:com.uber.hoodie.hadoop.realtime.HoodieRealtimeInputFormat.java
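This example uses Arrays.stream to turn the InputSplit[] returned by the superclass into a Stream<FileSplit>, which is then grouped by parent path to match parquet splits with their log files.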

@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {

    Stream<FileSplit> fileSplits = Arrays.stream(super.getSplits(job, numSplits)).map(is -> (FileSplit) is);

    // obtain all unique parent folders for splits
    Map<Path, List<FileSplit>> partitionsToParquetSplits = fileSplits
            .collect(Collectors.groupingBy(split -> split.getPath().getParent()));
    // TODO(vc): Should we handle also non-hoodie splits here?
    Map<String, HoodieTableMetaClient> metaClientMap = new HashMap<>();
    Map<Path, HoodieTableMetaClient> partitionsToMetaClient = partitionsToParquetSplits.keySet().stream()
            .collect(Collectors.toMap(Function.identity(), p -> {
                // find if we have a metaclient already for this partition.
                Optional<String> matchingBasePath = metaClientMap.keySet().stream()
                        .filter(basePath -> p.toString().startsWith(basePath)).findFirst();
                if (matchingBasePath.isPresent()) {
                    return metaClientMap.get(matchingBasePath.get());
                }

                try {
                    HoodieTableMetaClient metaClient = getTableMetaClient(p.getFileSystem(conf), p);
                    metaClientMap.put(metaClient.getBasePath(), metaClient);
                    return metaClient;
                } catch (IOException e) {
                    throw new HoodieIOException("Error creating hoodie meta client against : " + p, e);
                }
            }));

    // for all unique split parents, obtain all delta files based on delta commit timeline, grouped on file id
    List<HoodieRealtimeFileSplit> rtSplits = new ArrayList<>();
    partitionsToParquetSplits.keySet().stream().forEach(partitionPath -> {
        // for each partition path obtain the data & log file groupings, then map back to inputsplits
        HoodieTableMetaClient metaClient = partitionsToMetaClient.get(partitionPath);
        HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient,
                metaClient.getActiveTimeline());
        String relPartitionPath = FSUtils.getRelativePartitionPath(new Path(metaClient.getBasePath()),
                partitionPath);

        try {
            Stream<FileSlice> latestFileSlices = fsView.getLatestFileSlices(relPartitionPath);

            // subgroup splits again by file id & match with log files.
            Map<String, List<FileSplit>> groupedInputSplits = partitionsToParquetSplits.get(partitionPath)
                    .stream()
                    .collect(Collectors.groupingBy(split -> FSUtils.getFileId(split.getPath().getName())));
            latestFileSlices.forEach(fileSlice -> {
                List<FileSplit> dataFileSplits = groupedInputSplits.get(fileSlice.getFileId());
                dataFileSplits.forEach(split -> {
                    try {
                        List<String> logFilePaths = fileSlice.getLogFiles()
                                .map(logFile -> logFile.getPath().toString()).collect(Collectors.toList());
                        // Get the maxCommit from the last delta or compaction or commit - when bootstrapped from COW table
                        String maxCommitTime = metaClient.getActiveTimeline()
                                .getTimelineOfActions(Sets.newHashSet(HoodieTimeline.COMMIT_ACTION,
                                        HoodieTimeline.COMPACTION_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION))
                                .filterCompletedInstants().lastInstant().get().getTimestamp();
                        rtSplits.add(new HoodieRealtimeFileSplit(split, logFilePaths, maxCommitTime));
                    } catch (IOException e) {
                        throw new HoodieIOException("Error creating hoodie real time split ", e);
                    }
                });
            });
        } catch (Exception e) {
            throw new HoodieException("Error obtaining data file/log file grouping: " + partitionPath, e);
        }
    });
    LOG.info("Returning a total splits of " + rtSplits.size());
    return rtSplits.toArray(new InputSplit[rtSplits.size()]);
}

From source file:com.ethercamp.harmony.service.JsonRpcUsageService.java
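Here Arrays.stream iterates the String[] of available JSON-RPC methods to initialize per-method call statistics, pairing each method with a curl example.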

private void init(int port) {
    final String serverUrl = "http://localhost:" + port + AppConst.JSON_RPC_PATH;

    /**
     * Load conf file with curl examples for each JSON-RPC method.
     */
    Config config = ConfigFactory.load("json-rpc-help");
    if (config.hasPath("doc.curlExamples")) {

        Map<String, String> curlExamples = config.getAnyRefList("doc.curlExamples").stream()
                .map(e -> (HashMap<String, String>) e).collect(Collectors.toMap(e -> e.get("method"),
                        e -> e.get("curl").replace("${host}", serverUrl)));

        /**
         * Initialize empty stats for all methods.
         */
        Arrays.stream(jsonRpc.ethj_listAvailableMethods()).forEach(line -> {
            final String methodName = line.split(" ")[0];
            String curlExample = curlExamples.get(methodName);
            if (curlExample == null) {
                curlExample = generateCurlExample(line) + " " + serverUrl;
            // log.debug("Generate curl example for JSON-RPC method: " + methodName);
            }
            stats.put(methodName, new CallStats(methodName, 0L, null, curlExample));
        });
    }
}

From source file:koper.util.ReflectUtil.java
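This example streams the declared methods of a class, filtering by method name and a caller-supplied predicate, and returns the first match as an Optional.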

public static Optional<Method> getMethod(Class<?> clazz, String eventName, Predicate<Method> methodPredicate) {
    final Method[] methods = clazz.getDeclaredMethods();

    return Arrays.stream(methods).filter(method -> method.getName().equals(eventName)).filter(methodPredicate)
            .findAny();
}

From source file:Accuracy.DepthMaskFactory.java
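This example nests two Arrays.stream calls: a parallel outer stream over a three-dimensional read-count array, flat-mapped through an inner stream that sums the two read counts per genotype, then collected into a distribution.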

/**
 * Get a depth mask
 * @param readCounts The reads to mask
 * @param dontUse A list of genotypes not to mask
 * @param caller The genotype caller to use
 * @return A depth mask
 * @throws NotEnoughMaskableGenotypesException If there are not enough maskable genotypes
 */
public DepthMask getDepthMask(int[][][] readCounts, List<SingleGenotypePosition> dontUse, Caller caller)
        throws NotEnoughMaskableGenotypesException {
    ComparableDistribution<Integer> fulldist = Arrays.stream(readCounts).parallel()
            .flatMap(rc -> Arrays.stream(rc).map(r -> r[0] + r[1]))
            .collect(new ComparableDistributionCollector<>());
    ComparableDistribution<Integer> dist = fulldist.limitTo(0, limitDist);

    return new DepthMask(readCounts, number, minDepth, dist, method, dontUse, caller);
}

From source file:io.github.jeddict.jcode.util.PersistenceUtil.java
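Here Arrays.stream searches the array of existing persistence units for a match before adding a new unit, avoiding duplicates in persistence.xml.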

public static void updatePersistenceUnit(Project project, PersistenceUnit persistenceUnit) {
    PUDataObject pud;
    try {
        pud = ProviderUtil.getPUDataObject(project);
        if (!Arrays.stream(pud.getPersistence().getPersistenceUnit())
                .filter(pu -> Objects.equals(pu, persistenceUnit)).findAny().isPresent()) {
            pud.addPersistenceUnit(persistenceUnit);
        }
        pud.modelUpdated();
        pud.save();
    } catch (InvalidPersistenceXmlException ex) {
        Exceptions.printStackTrace(ex);
    }
}

From source file:com.sonicle.webtop.core.app.util.ClassHelper.java
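This example streams the annotation array returned by getAnnotationsByType and collects it, unordered, into a Set.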

public static Set<Annotation> getClassAnnotations(Class clazz, Class annotationClass) {
    if (clazz == null)
        return null;
    Annotation[] annotations = clazz.getAnnotationsByType(annotationClass);
    return Arrays.stream(annotations).unordered().collect(Collectors.toSet());
}

From source file:io.pivotal.strepsirrhini.chaoslemur.RandomFateEngine.java
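Here Arrays.stream with anyMatch implements a case-insensitive whitelist check against a member's deployment and job names.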

private boolean isWhitelisted(Member member) {
    return this.whitelist.length == 0 || Arrays.stream(this.whitelist)
            .anyMatch(s -> member.getDeployment().equalsIgnoreCase(s) || member.getJob().equalsIgnoreCase(s));
}

From source file:org.pdfsam.ui.dashboard.preference.PreferenceConfig.java
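This example streams the values of the Theme enum, maps each to a key/value item, and collects the result into a List for a preference combo box.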

@Bean(name = "themeCombo")
public PreferenceComboBox<KeyStringValueItem<String>> themeCombo() {
    PreferenceComboBox<KeyStringValueItem<String>> themeCombo = new PreferenceComboBox<>(
            StringUserPreference.THEME, userContext);
    themeCombo.setId("themeCombo");
    themeCombo.getItems().addAll(Arrays.stream(Theme.values())
            .map(t -> keyValue(t.toString(), t.friendlyName())).collect(Collectors.toList()));

    themeCombo.setValue(keyEmptyValue(userContext.getTheme()));
    return themeCombo;
}