Example usage for java.util.Arrays.stream

List of usage examples for java.util.Arrays.stream

Introduction

On this page you can find usage examples for java.util.Arrays.stream.

Prototype

public static DoubleStream stream(double[] array) 

Document

Returns a sequential DoubleStream with the specified array as its source.
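
A minimal, self-contained sketch of this overload (class and variable names are illustrative). Note that Arrays.stream is also overloaded for int[], long[], and generic T[] arrays; the usage examples below draw on several of those overloads.

import java.util.Arrays;
import java.util.stream.DoubleStream;

public class ArraysStreamDemo {
    public static void main(String[] args) {
        double[] values = { 1.5, 2.5, 4.0 };

        // Arrays.stream(double[]) returns a sequential DoubleStream
        DoubleStream stream = Arrays.stream(values);
        System.out.println("sum = " + stream.sum()); // sum = 8.0

        // the int[] overload returns an IntStream and works the same way
        long positives = Arrays.stream(new int[] { -1, 0, 3 }).filter(v -> v > 0).count();
        System.out.println("positives = " + positives); // positives = 1
    }
}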

Usage

From source file:com.yahoo.bard.webservice.config.ConfigResourceLoader.java

/**
 * Use a string to load a stream of all resources from the class path which match a given name.
 *
 * @param  name The class path address of a resource ('/foo' means a resource named foo in the default package)
 *
 * @return A stream of all class path resources corresponding to a particular name
 *
 * @throws IOException if any resources cannot be read from the class path successfully.
 */
public Stream<Resource> loadResourcesWithName(String name) throws IOException {
    String resourceName = RESOURCE_LOADER_PREFIX + name;
    LOG.debug("Attempting to load resources named {}", resourceName);
    return Arrays.stream(resolver.getResources(resourceName))
            .peek(it -> LOG.debug(RESOURCE_LOAD_MESSAGE.logFormat(name, it)));
}
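
Distilled from the snippet above, a hedged sketch of the same pattern: stream an object array (the T[] overload) and attach debug logging with peek. All names are illustrative.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class PeekLoggingSketch {
    public static void main(String[] args) {
        String[] resources = { "a.yaml", "b.yaml" };

        // peek adds a side effect (here: logging) without changing the elements;
        // it only runs once a terminal operation such as collect consumes the stream
        List<String> loaded = Arrays.stream(resources)
                .peek(r -> System.out.println("Loading resource: " + r))
                .collect(Collectors.toList());

        System.out.println(loaded.size() + " resources loaded");
    }
}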

From source file:io.wcm.devops.conga.tooling.maven.plugin.PackageMojo.java

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    resourceLoader = new ResourceLoader();

    Set<String> selectedEnvironments;
    if (environments != null && environments.length > 0) {
        selectedEnvironments = ImmutableSet.copyOf(environments);
    } else {
        selectedEnvironments = null;
    }

    // collect configuration environment directories
    File configRootDir = getTargetDir();
    List<File> environmentDirs = Arrays.stream(configRootDir.listFiles()).filter(file -> file.isDirectory())
            .filter(dir -> selectedEnvironments == null || selectedEnvironments.contains(dir.getName()))
            .collect(Collectors.toList());

    if (artifactPerEnvironment) {
        // generate a ZIP artifact with generated configurations for each environment
        for (File environmentDir : environmentDirs) {

            // classifier is environment name
            // if current project is not a config project, prefix the classifier
            String classifier = environmentDir.getName();
            if (!StringUtils.equals(project.getPackaging(), PACKAGING_CONFIGURATION)) {
                classifier = CLASSIFIER_CONFIGURATION + "-" + classifier;
            }
            validateClassifier(classifier);

            // build ZIP artifact
            File outputFile = buildZipFile(environmentDir, classifier);

            // attach ZIP artifact
            projectHelper.attachArtifact(project, outputFile, classifier);

        }
    } else {
        // generate a ZIP artifact containing all environments
        String classifier = null;
        if (!StringUtils.equals(project.getPackaging(), PACKAGING_CONFIGURATION)) {
            classifier = CLASSIFIER_CONFIGURATION;
        }
        validateClassifier(classifier);

        File outputFile = buildZipFile(configRootDir, classifier);
        // set or attach ZIP artifact
        if (StringUtils.equals(project.getPackaging(), PACKAGING_CONFIGURATION)) {
            project.getArtifact().setFile(outputFile);
        } else {
            projectHelper.attachArtifact(project, outputFile, CLASSIFIER_CONFIGURATION);
        }
    }

}
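
The core Arrays.stream call above, distilled into a standalone sketch. File.listFiles() returns null when the path is not a readable directory, so this hedged variant (the directory name is illustrative) guards before streaming:

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

class EnvironmentDirsSketch {
    static List<File> listSubdirectories(File root) {
        File[] children = root.listFiles(); // may be null if root is not a readable directory
        if (children == null) {
            return Collections.emptyList();
        }
        return Arrays.stream(children)
                .filter(File::isDirectory)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        listSubdirectories(new File("target/configuration")).forEach(System.out::println);
    }
}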

From source file:org.createnet.raptor.auth.service.services.AclTokenService.java

@Retryable(maxAttempts = 3, value = AclManagerService.AclManagerException.class, backoff = @Backoff(delay = 500, multiplier = 3))
public void register(Token token) {

    User owner = token.getUser();
    List<Permission> permissions = list(token, owner);
    Sid sid = new UserSid(owner);

    logger.debug("Found {} permissions for {}", permissions.size(), owner.getUuid());

    if (permissions.isEmpty()) {

        logger.debug("Set default permission");
        List<Permission> newPerms = Arrays.stream(defaultPermissions).collect(Collectors.toList());

        if (owner.getId().equals(token.getUser().getId())) {
            newPerms.add(RaptorPermission.ADMINISTRATION);
        }

        try {
            aclManagerService.addPermissions(Token.class, token.getId(), sid, newPerms);
        } catch (AclManagerService.AclManagerException ex) {
            logger.warn("Failed to store default permission for {} ({}): {}", token.getId(), sid,
                    ex.getMessage());
            throw ex;
        }

        permissions.addAll(newPerms);
    }

    String perms = String.join(", ", RaptorPermission.toLabel(permissions));
    logger.debug("Permission set for device {} to {} - {}", token.getName(), token.getUser().getUuid(), perms);

}
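
The Arrays.stream call above turns the Permission[] defaults into a list that can be extended with newPerms.add(...). A minimal sketch of why this matters, with types simplified to String: Arrays.asList would only give a fixed-size view, and adding to it throws.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class MutableCopySketch {
    public static void main(String[] args) {
        String[] defaults = { "READ", "WRITE" };

        // streaming and collecting produces an independent, extendable list
        List<String> perms = Arrays.stream(defaults).collect(Collectors.toList());
        perms.add("ADMINISTRATION");
        System.out.println(perms); // [READ, WRITE, ADMINISTRATION]

        // Arrays.asList is only a fixed-size view over the array
        List<String> view = Arrays.asList(defaults);
        try {
            view.add("ADMINISTRATION");
        } catch (UnsupportedOperationException e) {
            System.out.println("Arrays.asList view is fixed-size");
        }
    }
}

Strictly speaking, Collectors.toList() makes no formal mutability guarantee (the reference implementation returns an ArrayList); new ArrayList<>(Arrays.asList(defaults)) states the intent explicitly.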

From source file:eu.itesla_project.online.tools.PrintOnlineWorkflowSimulationResultsTool.java

@Override
public void run(CommandLine line) throws Exception {
    OnlineConfig config = OnlineConfig.load();
    OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create();
    String workflowId = line.getOptionValue("workflow");
    OnlineWorkflowResults wfResults = onlinedb.getResults(workflowId);
    if (wfResults != null) {
        if (!wfResults.getUnsafeContingencies().isEmpty()) {
            OnlineWorkflowParameters parameters = onlinedb.getWorkflowParameters(workflowId);
            SecurityIndexType[] securityIndexTypes = null;
            if (line.hasOption(SECURITY_INDEXES)) {
                Set<SecurityIndexType> securityIndexesTypeSet = Arrays
                        .stream(line.getOptionValue(SECURITY_INDEXES).split(","))
                        .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
                securityIndexTypes = securityIndexesTypeSet
                        .toArray(new SecurityIndexType[securityIndexesTypeSet.size()]);
            } else {
                securityIndexTypes = parameters.getSecurityIndexes() == null ? SecurityIndexType.values()
                        : parameters.getSecurityIndexes()
                                .toArray(new SecurityIndexType[parameters.getSecurityIndexes().size()]);
            }
            Table table = new Table(securityIndexTypes.length + 2, BorderStyle.CLASSIC_WIDE);
            StringWriter content = new StringWriter();
            CsvWriter cvsWriter = new CsvWriter(content, ',');
            String[] headers = new String[securityIndexTypes.length + 2];
            int i = 0;
            table.addCell("Contingency", new CellStyle(CellStyle.HorizontalAlign.center));
            headers[i++] = "Contingency";
            table.addCell("State", new CellStyle(CellStyle.HorizontalAlign.center));
            headers[i++] = "State";
            for (SecurityIndexType securityIndexType : securityIndexTypes) {
                table.addCell(securityIndexType.getLabel(), new CellStyle(CellStyle.HorizontalAlign.center));
                headers[i++] = securityIndexType.getLabel();
            }
            cvsWriter.writeRecord(headers);
            for (String contingencyId : wfResults.getUnsafeContingencies()) {
                for (Integer stateId : wfResults.getUnstableStates(contingencyId)) {
                    String[] values = new String[securityIndexTypes.length + 2];
                    i = 0;
                    table.addCell(contingencyId);
                    values[i++] = contingencyId;
                    table.addCell(stateId.toString(), new CellStyle(CellStyle.HorizontalAlign.right));
                    values[i++] = stateId.toString();
                    HashMap<String, String> indexesValues = getIndexesValues(
                            wfResults.getIndexesData(contingencyId, stateId), securityIndexTypes);
                    for (SecurityIndexType securityIndexType : securityIndexTypes) {
                        table.addCell(indexesValues.get(securityIndexType.getLabel()),
                                new CellStyle(CellStyle.HorizontalAlign.center));
                        values[i++] = indexesValues.get(securityIndexType.getLabel());
                    }
                    cvsWriter.writeRecord(values);
                }
            }
            cvsWriter.flush();
            if (line.hasOption("csv"))
                System.out.println(content.toString());
            else
                System.out.println(table.render());
            cvsWriter.close();
        } else
            System.out.println("\nNo contingencies requiring T-D simulation");
    } else
        System.out.println("No results for this workflow");
    onlinedb.close();
}
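
This tool (and PrintOnlineWorkflowPostContingencyViolationsTool further below) parses a comma-separated option into an enum set with the same idiom: split the string, stream the resulting array, and map each token through Enum.valueOf. A standalone sketch with a hypothetical enum:

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

class EnumOptionSketch {
    enum SecurityIndexType { SMALLSIGNAL, TRANSIENT, OVERLOAD } // illustrative constants

    public static void main(String[] args) {
        String option = "SMALLSIGNAL,OVERLOAD";

        Set<SecurityIndexType> types = Arrays.stream(option.split(","))
                .map(String::trim)               // tolerate spaces around commas
                .map(SecurityIndexType::valueOf) // throws IllegalArgumentException on unknown names
                .collect(Collectors.toSet());

        System.out.println(types);
    }
}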

From source file:com.ikanow.aleph2.v1.document_db.utils.V1DocumentDbHadoopUtils.java

/** 
 * @param input_config - the input settings
 * @return
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IAnalyticsAccessContext<InputFormat> getInputFormat(final String user_id,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input,
        final Optional<ISecurityService> maybe_security, final V1DocDbConfigBean config) {
    //TODO (ALEPH-20): need to perform security in here

    return new IAnalyticsAccessContext<InputFormat>() {
        private LinkedHashMap<String, Object> _mutable_output = null;

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(),
                    this.getAccessConfig().get().entrySet().stream()
                            .filter(kv -> !DESCRIBE_FILTER.contains(kv.getKey()))
                            .collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue())));
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @Override
        public Either<InputFormat, Class<InputFormat>> getAccessService() {
            return Either.right((Class<InputFormat>) (Class<?>) Aleph2V1InputFormat.class);
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessConfig()
         */
        @Override
        public Optional<Map<String, Object>> getAccessConfig() {
            if (null != _mutable_output) {
                return Optional.of(_mutable_output);
            }
            _mutable_output = new LinkedHashMap<>();

            // Parse various inputs:

            final List<String> communities = Arrays
                    .stream(job_input.resource_name_or_id()
                            .substring(BucketUtils.EXTERNAL_BUCKET_PREFIX.length()).split("_"))
                    .collect(Collectors.toList());

            // Validate communities:
            maybe_security.ifPresent(sec -> {
                communities.stream().filter(cid -> !sec.isUserPermitted(user_id, Tuples._2T("community", cid),
                        Optional.of(ISecurityService.ACTION_READ))).findAny().ifPresent(cid -> {
                            throw new RuntimeException(ErrorUtils
                                    .get(V1DocumentDbErrorUtils.V1_DOCUMENT_USER_PERMISSIONS, user_id, cid));
                        });
            });

            final String query = _mapper
                    .convertValue(Optional.ofNullable(job_input.filter()).orElse(Collections.emptyMap()),
                            JsonNode.class)
                    .toString();

            final Tuple4<String, Tuple2<Integer, Integer>, BasicDBObject, DBObject> horrible_object = LegacyV1HadoopUtils
                    .parseQueryObject(query, communities);

            final String db_server = config.mongodb_connection();

            // Here's all the fields to fill in

            // 1) Generic MongoDB fields:
            //name of job shown in jobtracker --><name>mongo.job.name</name><value>title
            //run the job verbosely ? --><name>mongo.job.verbose</name><value>true
            //Run the job in the foreground and wait for response, or background it? --><name>mongo.job.background</name><value>false
            //If you are reading from mongo, the URI --><name>mongo.input.uri</name><value>mongodb://"+dbserver+"/"+input
            //The number of documents to limit to for read [OPTIONAL] --><name>mongo.input.limit</name><value>" + nLimit
            //The query, in JSON, to execute [OPTIONAL] --><name>mongo.input.query</name><value>" + StringEscapeUtils.escapeXml(query)
            //The fields, in JSON, to read [OPTIONAL] --><name>mongo.input.fields</name><value>"+( (fields==null) ? ("") : fields )
            //InputFormat Class --><name>mongo.job.input.format</name><value>com.ikanow.infinit.e.data_model.custom.InfiniteMongoInputFormat

            _mutable_output.put("mongo.job.name",
                    Optional.ofNullable(job_input.data_service()).orElse("unknown") + ":"
                        + Optional.ofNullable(job_input.resource_name_or_id()).orElse("unknown")); // (I think this is ignored, in fact)
            _mutable_output.put("mongo.job.verbose", "true");
            _mutable_output.put("mongo.job.background", "false");
            _mutable_output.put("mongo.input.uri", "mongodb://" + db_server + "/doc_metadata.metadata");
            _mutable_output.put("mongo.input.query", horrible_object._1());
            _mutable_output.put("mongo.input.fields",
                    Optional.ofNullable(horrible_object._4()).map(o -> o.toString()).orElse(""));
            _mutable_output.put("mongo.input.limit", Optional.ofNullable(job_input.config())
                    .map(cfg -> cfg.test_record_limit_request()).map(o -> o.toString()).orElse("0"));

            // 2) Basic Infinit.e/MongoDB fields:
            //Maximum number of splits [optional] --><name>max.splits</name><value>"+nSplits
            //Maximum number of docs per split [optional] --><name>max.docs.per.split</name><value>"+nDocsPerSplit
            _mutable_output.put("max.splits", horrible_object._2()._1().toString());
            _mutable_output.put("max.docs.per.split", horrible_object._2()._2().toString());

            // 3) Advanced Infinit.e/MongoDB fields:            
            //Infinit.e src tags filter [optional] --><name>infinit.e.source.tags.filter</name><value>"+srcTags.toString()
            if (null != horrible_object._3()) {
                _mutable_output.put("infinit.e.source.tags.filter", horrible_object._3().toString());
            }
            return Optional.of(Collections.unmodifiableMap(_mutable_output));
        }
    };
}
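
The Arrays.stream usage buried in getAccessConfig() above splits a prefixed resource name into community IDs. Reduced to its essentials (the prefix and names are hypothetical):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class ResourceTokensSketch {
    public static void main(String[] args) {
        String prefix = "/ext_"; // stand-in for BucketUtils.EXTERNAL_BUCKET_PREFIX
        String resourceNameOrId = "/ext_commA_commB";

        List<String> communities = Arrays
                .stream(resourceNameOrId.substring(prefix.length()).split("_"))
                .collect(Collectors.toList());

        System.out.println(communities); // [commA, commB]
    }
}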

From source file:com.teradata.benchto.driver.graphite.GraphiteClient.java

@Retryable(value = { RestClientException.class,
        IncompleteDataException.class }, backoff = @Backoff(delay = 5000, multiplier = 2), maxAttempts = 4)
public Map<String, double[]> loadMetrics(Map<String, String> metrics, long fromEpochSecond,
        long toEpochSecond) {
    URI uri = buildLoadMetricsURI(metrics, fromEpochSecond, toEpochSecond);

    LOGGER.debug("Loading metrics: {}", uri);

    ResponseEntity<GraphiteRenderResponseItem[]> response = restTemplate.getForEntity(uri,
            GraphiteRenderResponseItem[].class);

    if (response.getStatusCode() != OK) {
        throw new BenchmarkExecutionException("Could not load metrics: " + metrics + " - error: " + response);
    }

    return Arrays.stream(response.getBody()).collect(toMap(GraphiteRenderResponseItem::getTarget,
            responseItem -> parseDataPoints(responseItem.datapoints)));
}
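
Distilled: stream the deserialized response array and build a map with Collectors.toMap, keyed by one accessor and valued by a transformation of another. A self-contained sketch with a hypothetical stand-in for GraphiteRenderResponseItem:

import java.util.Arrays;
import java.util.Map;
import static java.util.stream.Collectors.toMap;

class ToMapSketch {
    static class ResponseItem { // hypothetical stand-in for GraphiteRenderResponseItem
        final String target;
        final double[] datapoints;
        ResponseItem(String target, double[] datapoints) {
            this.target = target;
            this.datapoints = datapoints;
        }
    }

    public static void main(String[] args) {
        ResponseItem[] body = {
                new ResponseItem("cpu", new double[] { 0.3, 0.7 }),
                new ResponseItem("mem", new double[] { 0.5 })
        };

        // note: toMap throws IllegalStateException if two items share a target
        Map<String, double[]> metrics = Arrays.stream(body)
                .collect(toMap(item -> item.target, item -> item.datapoints));

        System.out.println(metrics.keySet());
    }
}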

From source file:eu.itesla_project.online.tools.PrintOnlineWorkflowPostContingencyViolationsTool.java

@Override
public void run(CommandLine line) throws Exception {
    OnlineConfig config = OnlineConfig.load();
    String workflowId = line.getOptionValue("workflow");
    final LimitViolationFilter violationsFilter = (line.hasOption("type"))
            ? new LimitViolationFilter(Arrays.stream(line.getOptionValue("type").split(","))
                    .map(LimitViolationType::valueOf).collect(Collectors.toSet()), 0)
            : null;
    TableFormatterConfig tableFormatterConfig = TableFormatterConfig.load();
    Column[] tableColumns = { new Column("State"), new Column("Contingency"), new Column("Equipment"),
            new Column("Type"), new Column("Value"), new Column("Limit"), new Column("Limit reduction"),
            new Column("Voltage Level") };
    Path cvsOutFile = (line.hasOption("csv")) ? Paths.get(line.getOptionValue("csv")) : null;
    try (OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create()) {
        if (line.hasOption("state") && line.hasOption("contingency")) {
            Integer stateId = Integer.parseInt(line.getOptionValue("state"));
            String contingencyId = line.getOptionValue("contingency");
            List<LimitViolation> violations = onlinedb.getPostContingencyViolations(workflowId, stateId,
                    contingencyId);
            if (violations != null && !violations.isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    printStateContingencyViolations(formatter, stateId, contingencyId, violations,
                            violationsFilter);
                }
            } else {
                System.out.println("\nNo post contingency violations for workflow " + workflowId
                        + ", contingency " + contingencyId + " and state " + stateId);
            }
        } else if (line.hasOption("state")) {
            Integer stateId = Integer.parseInt(line.getOptionValue("state"));
            Map<String, List<LimitViolation>> stateViolationsByStateId = onlinedb
                    .getPostContingencyViolations(workflowId, stateId);
            if (stateViolationsByStateId != null && !stateViolationsByStateId.keySet().isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    new TreeMap<>(stateViolationsByStateId)
                            .forEach((contingencyId, violations) -> printStateContingencyViolations(formatter,
                                    stateId, contingencyId, violations, violationsFilter));
                }
            } else {
                System.out.println("\nNo post contingency violations for workflow " + workflowId + " and state "
                        + stateId);
            }
        } else {
            if (line.hasOption("contingency")) {
                String contingencyId = line.getOptionValue("contingency");
                Map<Integer, List<LimitViolation>> contingencyViolationsByContingencyId = onlinedb
                        .getPostContingencyViolations(workflowId, contingencyId);
                if (contingencyViolationsByContingencyId != null
                        && !contingencyViolationsByContingencyId.keySet().isEmpty()) {
                    try (TableFormatter formatter = PrintOnlineWorkflowUtils
                            .createFormatter(tableFormatterConfig, cvsOutFile, TABLE_TITLE, tableColumns)) {
                        new TreeMap<>(contingencyViolationsByContingencyId)
                                .forEach((stateId, violations) -> printStateContingencyViolations(formatter,
                                        stateId, contingencyId, violations, violationsFilter));
                    }
                } else {
                    System.out.println("\nNo post contingency violations for workflow " + workflowId
                            + " and contingency " + contingencyId);
                }
            } else {
                Map<Integer, Map<String, List<LimitViolation>>> wfViolations = onlinedb
                        .getPostContingencyViolations(workflowId);
                if (wfViolations != null && !wfViolations.keySet().isEmpty()) {
                    try (TableFormatter formatter = PrintOnlineWorkflowUtils
                            .createFormatter(tableFormatterConfig, cvsOutFile, TABLE_TITLE, tableColumns)) {
                        new TreeMap<>(wfViolations).forEach((stateId, stateViolations) -> {
                            if (stateViolations != null) {
                                new TreeMap<>(stateViolations).forEach((contingencyId, violations) -> {
                                    printStateContingencyViolations(formatter, stateId, contingencyId,
                                            violations, violationsFilter);
                                });
                            }
                        });

                    }
                } else {
                    System.out.println("\nNo post contingency violations for workflow " + workflowId);
                }
            }
        }
    }
}

From source file:gedi.atac.OnlinePeakFinder.java

@Override
public IntervalTree<GenomicRegion, Double> map(ReferenceSequence reference, GenomicRegion region,
        PixelLocationMapping pixelMapping,
        MutablePair<PixelBlockToValuesMap, IntervalTree<GenomicRegion, AlignedReadsData>> data) {
    int bases = 0;
    int bins = data.Item1.size();

    int[] c = new int[bins];

    for (int b = 0; b < data.Item1.size(); b++) {
        PixelLocationMappingBlock bl = data.Item1.getBlock(b);
        bases += bl.getStopBp() + 1 - bl.getStartBp();

        c[b] = (int) NumericArrayFunction.Sum.applyAsDouble(data.Item1.getValues(b));
    }

    System.out.printf("Bases=%d\tBins=%d\tInsertions=%d\tSites=%d\n", bases, bins, ArrayUtils.sum(c),
            Arrays.stream(c).filter(v -> v > 0).count());

    IntArrayList hist = new IntArrayList();
    for (int i : c)
        hist.increment(i);

    try {
        r.eval("layout(t(1:2))");
        r.set("h", hist.toIntArray());
        r.eval("barplot(h,names.arg=1:length(h))");

    } catch (RserveException e) {
    }

    hist.clear();
    for (GenomicRegion d : data.Item2.keySet())
        hist.increment(d.getTotalLength(), data.Item2.get(d).getTotalCountOverallFloor(ReadCountMode.Weight));

    try {

        r.set("h", hist.toIntArray());
        r.eval("barplot(h,names.arg=1:length(h))");

    } catch (RserveException e) {
    }

    GenomicRegion re = new ArrayGenomicRegion();

    double mean = ArrayUtils.sum(c) / c.length;
    // find consecutive ranges that are >=factor*mean
    int sizeBefore;

    do {
        sizeBefore = re.getTotalLength();

        double min = mean * factor;
        int start = -1;
        double sum = 0;
        for (int i = 0; i < c.length; i++) {
            if (c[i] >= min) {
                // in range
                if (start == -1)
                    start = i;
            } else {
                // not in range
                if (start > -1) {
                    for (; start - 1 >= 0 && c[start - 1] > mean; start--)
                        ;
                    for (; i + 1 < c.length && c[i + 1] > mean; i++)
                        ;
                    re = re.union(new ArrayGenomicRegion(start, i));
                    start = -1;
                }
                sum += c[i];
            }
        }

        if (start > -1) {
            for (; start - 1 >= 0 && c[start - 1] > mean; start--)
                ;
            re = re.union(new ArrayGenomicRegion(start, c.length));
        }

        // compute new mean
        mean = sum / (c.length - re.getTotalLength());

    } while (sizeBefore < re.getTotalLength());

    IntervalTree<GenomicRegion, Double> re2 = new IntervalTree<GenomicRegion, Double>(
            data.Item2.getReference());
    for (GenomicRegionPart p : re)
        re2.put(new ArrayGenomicRegion(data.Item1.getBlock(p.getStart()).getStartBp(),
                data.Item1.getBlock(p.getStop()).getStopBp() + 1), max(c, p.getStart(), p.getStop() + 1));

    return re2;

}
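
The Arrays.stream(c) call in the printf above uses the int[] overload, which yields an IntStream rather than a Stream<Integer>, so filter and count run without boxing. A minimal sketch:

import java.util.Arrays;

class IntStreamSketch {
    public static void main(String[] args) {
        int[] counts = { 0, 4, 0, 7, 2 };

        long sites = Arrays.stream(counts).filter(v -> v > 0).count(); // bins with insertions
        int insertions = Arrays.stream(counts).sum();

        System.out.println("sites=" + sites + " insertions=" + insertions); // sites=3 insertions=13
    }
}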

From source file:com.flipkart.flux.client.intercept.WorkflowInterceptor.java

private String checkForCorrelationId(Object[] arguments) throws IllegalAccessException {
    final String[] correlationId = { null };
    /* Iterate over given arguments to find if there is any argument that has a field marked with <code>CorrelationId</code> */
    for (Object anArgument : arguments) {
        final Field[] allFields = anArgument.getClass().getDeclaredFields();
        /* Search for any field which is of type String and has a CorrelationId annotation */
        final Optional<Field> possibleAnnotatedField = Arrays.stream(allFields)
                .filter(field -> String.class.isAssignableFrom(field.getType()))
                .filter(field -> field.getAnnotationsByType(CorrelationId.class).length > 0).findAny();
        /* If we have a field that matches above criteria, we populate the correlationId variable and break */
        if (possibleAnnotatedField.isPresent()) {
            final Field correlationIdAnnotatedField = possibleAnnotatedField.get();
            final boolean originalAccessibility = correlationIdAnnotatedField.isAccessible();
            if (!originalAccessibility) {
                correlationIdAnnotatedField.setAccessible(true);
            }
            try {
                correlationId[0] = (String) correlationIdAnnotatedField.get(anArgument);
                break;
            } finally {
                if (!originalAccessibility) {
                    correlationIdAnnotatedField.setAccessible(false);
                }
            }
        }
    }
    return correlationId[0];
}
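
Distilled from the interceptor above: stream an object's declared fields, filter by type and annotation, and take the Optional from findAny. A hedged sketch with a hypothetical annotation and argument class:

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Optional;

class CorrelationIdSketch {
    @Retention(RetentionPolicy.RUNTIME)
    @interface CorrelationId { } // stand-in for the real annotation

    static class Order {
        @CorrelationId
        String requestId = "req-42";
    }

    public static void main(String[] args) throws IllegalAccessException {
        Object argument = new Order();

        Optional<Field> annotated = Arrays.stream(argument.getClass().getDeclaredFields())
                .filter(f -> String.class.isAssignableFrom(f.getType()))
                .filter(f -> f.getAnnotationsByType(CorrelationId.class).length > 0)
                .findAny();

        if (annotated.isPresent()) {
            Field field = annotated.get();
            field.setAccessible(true); // the interceptor above restores the original accessibility
            System.out.println((String) field.get(argument)); // req-42
        }
    }
}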

From source file:io.blobkeeper.file.service.FileListServiceImpl.java

@Override
public List<Integer> getDisks() {
    java.io.File filePath = new java.io.File(configuration.getBasePath());
    checkArgument(filePath.exists(), "Base path must exist");

    return Arrays.stream(filePath.list(DIRECTORY)).map(this::parseDisk).filter(disk -> disk != null)
            .collect(toImmutableList());
}
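
A hedged null-safe variant of the same idea: File.list(), like File.listFiles(), can return null even when the base path exists, so a defensive version wraps the array in an Optional before streaming. The numeric directory filter standing in for parseDisk is an assumption:

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

class DiskListSketch {
    public static void main(String[] args) {
        File basePath = new File("/var/blobkeeper"); // illustrative base path

        List<Integer> disks = Optional.ofNullable(basePath.list())
                .map(names -> Arrays.stream(names)
                        .filter(name -> name.matches("\\d+")) // hypothetical disk-directory naming
                        .map(Integer::parseInt)
                        .collect(Collectors.toList()))
                .orElse(Collections.emptyList());

        System.out.println(disks);
    }
}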