Example usage for java.util ArrayList parallelStream

List of usage examples for java.util ArrayList parallelStream

Introduction

On this page you can find example usage for java.util ArrayList parallelStream.

Prototype

default Stream<E> parallelStream() 

Document

Returns a possibly parallel Stream with this collection as its source.
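
As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the call; the list contents and the filter predicate are illustrative assumptions, not taken from the projects quoted below.

import java.util.ArrayList;
import java.util.Arrays;

public class ParallelStreamSketch {
    public static void main(String[] args) {
        ArrayList<Integer> numbers = new ArrayList<>(Arrays.asList(3, 1, 4, 1, 5, 9, 2, 6));

        // parallelStream() returns a possibly parallel Stream backed by the list,
        // so the terminal operation may run on multiple threads of the common ForkJoinPool.
        long countAboveThree = numbers.parallelStream()
                .filter(n -> n > 3)
                .count();

        System.out.println(countAboveThree); // prints 4
    }
}

Because the pipeline may execute concurrently, its operations should be stateless and non-interfering, which is why the examples below mostly accumulate results with thread-safe types such as AtomicInteger or ConcurrentHashMap.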

Usage

From source file:com.polytech4A.cuttingstock.core.resolution.util.context.ContextLoaderUtils.java

/**
 * Loads the content of the context file.
 *
 * @param path path of the context file.
 * @return the loaded Context.
 * @throws IOException                   if the file is not found.
 * @throws MalformedContextFileException if the context file does not have the right structure.
 * @throws IllogicalContextException     if an image is bigger than the pattern's maximum size.
 */
public static Context loadContext(String path)
        throws IOException, MalformedContextFileException, IllogicalContextException {
    File file = new File(path);
    LineIterator it = FileUtils.lineIterator(file, "UTF-8");
    ArrayList<Box> boxes = new ArrayList<>();
    try {
        Double x = loadLine(it, "^LX=[0-9]{1,13}(\\.[0-9]*)?$");
        Double y = loadLine(it, "LY=[0-9]{1,13}(\\.[0-9]*)?");
        int cost = loadLine(it, "m=[0-9]{1,13}(\\.[0-9]*)?").intValue();
        while (it.hasNext()) {
            boxes.add(loadBox(it.nextLine()));
        }
        LineIterator.closeQuietly(it);
        double max = Math.max(x, y);
        if (boxes.parallelStream().anyMatch(b -> b.getSize().getX() > max)
                || boxes.parallelStream().anyMatch(b -> b.getSize().getY() > max)) {
            throw new IllogicalContextException("There is an image which is bigger than the pattern.");
        }
        return new Context(file.getName(), 20, 1, boxes, new Vector(x, y));
    } catch (MalformedContextFileException mctx) {
        throw mctx;
    } finally {
        LineIterator.closeQuietly(it);
    }
}

From source file:delfos.rs.trustbased.WeightedGraph.java

private static void validateWeightsGraph(AdjMatrixEdgeWeightedDigraph adjMatrixEdgeWeightedDigraph) {

    List<DirectedEdge> allEdges = IntStream.range(0, adjMatrixEdgeWeightedDigraph.V()).boxed().parallel()
            .map(vertex -> {
                Iterable<DirectedEdge> iterator = adjMatrixEdgeWeightedDigraph.adj(vertex);
                ArrayList<DirectedEdge> listOfEdges = new ArrayList<>();
                for (DirectedEdge edge : iterator) {
                    listOfEdges.add(edge);
                }
                return listOfEdges;
            }).flatMap(listOfEdges -> listOfEdges.parallelStream()).collect(Collectors.toList());

    List<DirectedEdge> badEdges = allEdges.parallelStream()
            .filter(edge -> (edge.weight() < 0) || (edge.weight() > 1)).collect(Collectors.toList());

    if (!badEdges.isEmpty()) {
        System.out.println("List of bad edges:");
        badEdges.forEach(edge -> System.out.println("\t" + edge));
        throw new IllegalStateException("arg");
    }

}

From source file:com.evilisn.ScheduledTasks.java

@Scheduled(fixedRate = 10 * 60000)
public void scheduledCheckOCSP() throws InterruptedException {
    log.info("fetching certs");
    ArrayList<Cert> certs = (ArrayList<Cert>) getCerts();
    log.info(String.format("[%d] certs to be checked.", certs.size()));

    Date now = new Date();
    long startTime = now.getTime();
    System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "50");
    AtomicInteger c_REVOKED = new AtomicInteger();
    AtomicInteger c_GOOD = new AtomicInteger();
    AtomicInteger c_UNKNOWN = new AtomicInteger();
    AtomicInteger c_VALID = new AtomicInteger();
    AtomicInteger c_EXPIRED = new AtomicInteger();

    certs.parallelStream().forEach(o -> {
        try {
            if (o.getClient_cert().getNotAfter().after(now)) {
                OCSP.RevocationStatus.CertStatus resp = OCSP.check(o.getClient_cert(), o.getIssuer_cert())
                        .getCertStatus();
                log.info(String.format("Serial Number [%20s]| OCSP Status:[%s]",
                        o.getClient_cert().getSerialNumber(), resp.toString()));
                c_VALID.getAndIncrement();
                if (resp == OCSP.RevocationStatus.CertStatus.GOOD)
                    c_GOOD.getAndIncrement();
                if (resp == OCSP.RevocationStatus.CertStatus.UNKNOWN)
                    c_UNKNOWN.getAndIncrement();
                if (resp == OCSP.RevocationStatus.CertStatus.REVOKED)
                    c_REVOKED.getAndIncrement();
            } else {
                //expired.
                c_EXPIRED.getAndIncrement();
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (CertPathValidatorException e) {
            e.printStackTrace();
        }
    });
    long endTime = System.currentTimeMillis();
    log.info("ALL " + certs.size() + " certificates processed in " + (endTime - startTime) / 1000
            + " seconds, with " + c_VALID.get() + " valid certs, " + c_EXPIRED.get() + " expired certs, among which "
            + c_GOOD.get() + " are GOOD, " + c_REVOKED.get() + " are revoked, and " + c_UNKNOWN.get()
            + " are UNKNOWN.");
}

From source file:com.wso2.code.quality.matrices.ChangesFinder.java

/**
 * Reads the blame received for the currently selected file and inserts the parent commits of the changed lines,
 * the relevant authors and the relevant commit hashes in order to look for the reviewers of those line ranges.
 *
 * @param rootJsonObject                                JSONObject containing blame information for the currently selected file
 * @param arrayListOfRelevantChangedLinesOfSelectedFile arraylist containing the changed line ranges of the currently selected file
 * @param gettingPr                                     should be true when this method is run to find the authors of buggy lines which are being fixed by the patch
 * @param oldRange                                      line range in the old file to consider when gettingPr is true; expected to be null otherwise
 */
public void readBlameReceivedForAFile(JSONObject rootJsonObject,
        ArrayList<String> arrayListOfRelevantChangedLinesOfSelectedFile, boolean gettingPr, String oldRange) {

    //running an iterator over the fileName arrayList to get the location of the above saved file
    JSONObject dataJSONObject = (JSONObject) rootJsonObject.get(GITHUB_GRAPHQL_API_DATA_KEY_STRING);
    JSONObject repositoryJSONObect = (JSONObject) dataJSONObject.get(GITHUB_GRAPHQL_API_REPOSITORY_KEY_STRING);
    JSONObject objectJSONObject = (JSONObject) repositoryJSONObect.get(GITHUB_GRAPHQL_API_OBJECT_KEY_STRING);
    JSONObject blameJSONObject = (JSONObject) objectJSONObject.get(GITHUB_GRAPHQL_API_BLAME_KEY_STRING);
    JSONArray rangeJSONArray = (JSONArray) blameJSONObject.get(GITHUB_GRAPHQL_API_RANGES_KEY_STRING);

    //getting the starting line no of the range of lines that are modified from the patch
    // parallel streams are not used here as the order of the arraylist is important in the process
    arrayListOfRelevantChangedLinesOfSelectedFile.stream().forEach(lineRanges -> {
        int startingLineNo = 0;
        int endLineNo = 0;
        String oldFileRange = StringUtils.substringBefore(lineRanges, "/");
        String newFileRange = StringUtils.substringAfter(lineRanges, "/");
        // newly created files need to be skipped when taking the blame as they contain no previous commits
        if (!oldFileRange.equals("0,0")) {
            if (gettingPr && oldRange.equals(oldFileRange)) {
                // need to consider the line range in the old file for finding authors and reviewers
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(oldFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(oldFileRange, ","));
            } else if (!gettingPr && oldRange == null) {
                // need to consider the line range in the new file resulted from applying the commit, for finding parent commits
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(newFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(newFileRange, ","));
            } else {
                return; // skip to the next iteration if oldRange != oldFileRange when finding author names and commits for obtaining PRs
            }

            // as a new mapForStoringAgeAndIndex map should be available for each line range to find the most recent change
            Map<Integer, ArrayList<Integer>> mapForStoringAgeAndIndex = new HashMap<Integer, ArrayList<Integer>>();

            //checking line by line by iterating the startingLineNo
            while (endLineNo >= startingLineNo) {
                    // a plain "for loop" is used instead of Java 8 features since the index value is required for later processing
                for (int i = 0; i < rangeJSONArray.length(); i++) {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(i);
                    int tempStartingLineNo = (int) rangeJSONObject
                            .get(GITHUB_GRAPHQL_API_STARTING_LINE_KEY_STRING);
                    int tempEndingLineNo = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_ENDING_LINE_KEY_STRING);

                    //checking whether the line belongs to that line range group
                    if ((tempStartingLineNo <= startingLineNo) && (tempEndingLineNo >= startingLineNo)) {
                        // so the relevant startingLineNo belongs in this line range in other words in this JSONObject
                        if (!gettingPr) {
                            int age = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_AGE_KEY_STRING);
                            // storing the age field with relevant index of the JSONObject
                            mapForStoringAgeAndIndex.putIfAbsent(age, new ArrayList<Integer>());
                            if (!mapForStoringAgeAndIndex.get(age).contains(i)) {
                                mapForStoringAgeAndIndex.get(age).add(i); // adding if the index is not present in the array list for the relevant age
                            }

                        } else {
                            //for saving the author names of committers
                            JSONObject commitJSONObject = (JSONObject) rangeJSONObject
                                    .get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);

                            JSONObject authorJSONObject = (JSONObject) commitJSONObject
                                    .get(GITHUB_GRAPHQL_API_AUTHOR_KEY_STRING);
                            String nameOfTheAuthor = (String) authorJSONObject
                                    .get(GITHUB_GRAPHQL_API_NAME_KEY_STRING);
                            authorNames.add(nameOfTheAuthor); // authors are added to the Set

                            String urlOfCommit = (String) commitJSONObject
                                    .get(GITHUB_GRAPHQL_API_URL_KEY_STRING);
                            String commitHashForPRReview = StringUtils.substringAfter(urlOfCommit, "commit/");
                            commitHashObtainedForPRReview.add(commitHashForPRReview);
                        }
                        break;
                    } else {
                        continue; // to skip to the next JSON Object in the rangeJSONArray
                    }
                }
                startingLineNo++; // to check for other line numbers
            }

            //for the above line range getting the latest commit which modified the lines
            if (!gettingPr) {
                //converting the map into a treeMap to get it ordered
                TreeMap<Integer, ArrayList<Integer>> treeMap = new TreeMap<>(mapForStoringAgeAndIndex);
                int minimumKeyOfMapForStoringAgeAndIndex = treeMap.firstKey(); // getting the minimum key
                //getting the relevant JSONObject indexes containing the most recent change within the relevant line range
                ArrayList<Integer> indexesOfJsonObjectForRecentCommit = mapForStoringAgeAndIndex
                        .get(minimumKeyOfMapForStoringAgeAndIndex);
                // the order of the indexesOfJsonObjectForRecentCommit is not important as we only need to get the parent commit hashes
                indexesOfJsonObjectForRecentCommit.parallelStream().forEach(index -> {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(index);
                    JSONObject commitJSONObject = (JSONObject) rangeJSONObject
                            .get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);
                    JSONObject historyJSONObject = (JSONObject) commitJSONObject
                            .get(GITHUB_GRAPHQL_API_HISTORY_KEY_STRING);
                    JSONArray edgesJSONArray = (JSONArray) historyJSONObject
                            .get(GITHUB_GRAPHQL_API_EDGE_KEY_STRING);
                    //getting the second json object from the array as it contains the commit of the parent which modified the above line range
                    JSONObject edgeJSONObject = (JSONObject) edgesJSONArray.get(1);
                    JSONObject nodeJSONObject = (JSONObject) edgeJSONObject
                            .get(GITHUB_GRAPHQL_API_NODE_KEY_STRING);
                    String urlOfTheParentCommit = (String) nodeJSONObject
                            .get(GITHUB_GRAPHQL_API_URL_KEY_STRING); // this contains the URL of the parent commit
                    String commitHash = (String) StringUtils.substringAfter(urlOfTheParentCommit, "commit/");
                    //                                        commitHashesOfTheParent.add(commitHash);    // commitHashesof the parent for the selected file

                    commitHashesMapOfTheParent.putIfAbsent(oldFileRange, new HashSet<String>());
                    if (!commitHashesMapOfTheParent.get(oldFileRange).contains(commitHash)) {
                        commitHashesMapOfTheParent.get(oldFileRange).add(commitHash);
                    }
                });
            }

        }

    });
}

From source file:com.polytech4A.cuttingstock.core.solver.SimulatedAnnealing.java

/**
 * Sets the temperature of the Simulated Annealing algorithm.
 *
 * @param solution solution from which to generate the temperature.
 * @return the generated temperature, or -1 if the definition of the first temperature failed.
 */
public double setFirstTemperature(Solution solution) {
    Solution clSolution = solution.clone();
    ArrayList<Solution> solutionsN = new ArrayList<>(); // Solutions from the neighbourhood of the solution.
    int nbIteration = (1000 * solution.getPatterns().get(0).getAmounts().size())
            / solution.getPatterns().size();
    for (int i = 0; i < nbIteration; i++) {
        Solution randomSolution = chooseRandomNeihbourUtils().getRandomNeighbour(clSolution);
        Solution packedSolution = packer.getPlacing(randomSolution);

        if (packedSolution.isPackable() && packedSolution.isValid()) {
            solutionsN.add(packedSolution);
        }
    }
    try {
        double temperature = solutionsN.parallelStream().mapToDouble(s -> {
            Result r = simplex.minimize(s);
            if (r == null) {
                return (double) 0;
            } else {
                return r.getCost();
            }
        }).max().getAsDouble() * temp_coef;
        this.temperature = temperature;
        return temperature;
    } catch (NoSuchElementException ex) {
        logger.error("Generation of first temperature", ex);
    }
    this.temperature = -1;
    return -1;
}

From source file:diffhunter.Indexer.java

public void Make_Index(Database hashdb, String file_name, String read_gene_location)
        throws FileNotFoundException, IOException {
    Set_Parameters();
    //System.out.print("Sasa");
    ConcurrentHashMap<String, Map<Integer, Integer>> dic_gene_loc_count = new ConcurrentHashMap<>();
    ArrayList<String> lines_from_bed_file = new ArrayList<>();
    BufferedReader br = new BufferedReader(new FileReader(file_name));

    String line = br.readLine();
    List<String> toks = Arrays.asList(line.split("\t"));
    lines_from_bed_file.add(line);
    String last_Seen_chromosome = toks.get(0).replace("chr", "");
    line = br.readLine();
    lines_from_bed_file.add(line);
    toks = Arrays.asList(line.split("\t"));
    String new_chromosome = toks.get(0).replace("chr", "");

    while (((line = br.readLine()) != null) || lines_from_bed_file.size() > 0) {
        if (line != null) {
            toks = Arrays.asList(line.split("\t"));
            new_chromosome = toks.get(0).replace("chr", "");
        }
        // process the line.
        if (line == null || !new_chromosome.equals(last_Seen_chromosome)) {
            System.out.println("Processing chromosome" + "\t" + last_Seen_chromosome);
            last_Seen_chromosome = new_chromosome;
            lines_from_bed_file.parallelStream().forEach(content -> {

                List<String> inner_toks = Arrays.asList(content.split("\t"));
                // WARNING: STRAND column count should be changed.
                String strand = inner_toks.get(5);
                String chromosome_ = inner_toks.get(0).replace("chr", "");
                if (!dic_Loc_gene.get(strand).containsKey(chromosome_)) {
                    return;
                }
                Integer start_loc = Integer.parseInt(inner_toks.get(1));
                Integer end_loc = Integer.parseInt(inner_toks.get(2));
                List<Interval<String>> res__ = dic_Loc_gene.get(strand).get(chromosome_).getIntervals(start_loc,
                        end_loc);
                //IntervalTree<String> pot_gene_name=new IntervalTree<>(res__);
                //                        for (int z = 0; z < pot_gene_name.Intervals.Count; z++)
                //{
                for (int z = 0; z < res__.size(); z++) {

                    dic_gene_loc_count.putIfAbsent(res__.get(z).getData(), new HashMap<>());
                    String gene_symbol = res__.get(z).getData();
                    Integer temp_gene_start_loc = dic_genes.get(gene_symbol).start_loc;
                    Integer temp_gene_end_loc = dic_genes.get(gene_symbol).end_loc;
                    if (start_loc < temp_gene_start_loc) {
                        start_loc = temp_gene_start_loc;
                    }
                    if (end_loc > temp_gene_end_loc) {
                        end_loc = temp_gene_end_loc;
                    }
                    synchronized (dic_synchrinzer_genes.get(gene_symbol)) {
                        for (int k = start_loc; k <= end_loc; k++) {
                            Integer value_inside = 0;
                            value_inside = dic_gene_loc_count.get(gene_symbol).get(k);
                            dic_gene_loc_count.get(gene_symbol).put(k,
                                    value_inside == null ? 1 : (value_inside + 1));
                        }
                    }
                }
            });
            /*                    List<string> keys_ = dic_gene_loc_count.Keys.ToList();
             List<string> alt_keys = new List<string>();// dic_gene_loc_count.Keys.ToList();
             for (int i = 0; i < keys_.Count; i++)
             {
             Dictionary<int, int> dicccc_ = new Dictionary<int, int>();
             dic_gene_loc_count[keys_[i]] = new Dictionary<int, int>(dic_gene_loc_count[keys_[i]].Where(x => x.Value >= 2).ToDictionary(x => x.Key, x => x.Value));
             if (dic_gene_loc_count[keys_[i]].Count == 0)
             {
                    
             dic_gene_loc_count.TryRemove(keys_[i], out dicccc_);
             continue;
             }
             hashdb.Put(Get_BDB(keys_[i]), Get_BDB_Dictionary(dic_gene_loc_count[keys_[i]]));
             alt_keys.Add(keys_[i]);
             dic_gene_loc_count.TryRemove(keys_[i], out dicccc_);
             }*/
            ArrayList<String> keys_ = new ArrayList<>(dic_gene_loc_count.keySet());
            ArrayList<String> alt_keys = new ArrayList<>();
            for (int i = 0; i < keys_.size(); i++) {

                //LinkedHashMap<Integer, Integer> tmep_map = new LinkedHashMap<>(dic_gene_loc_count.get(keys_.get(i)));
                LinkedHashMap<Integer, Integer> tmep_map = new LinkedHashMap<>();
                /*tmep_map = */
                dic_gene_loc_count.get(keys_.get(i)).entrySet().stream().filter(p -> p.getValue() >= 2)
                        .sorted(Comparator.comparing(E -> E.getKey()))
                        .forEach((entry) -> tmep_map.put(entry.getKey(), entry.getValue()));//.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
                if (tmep_map.isEmpty()) {
                    dic_gene_loc_count.remove(keys_.get(i));
                    continue;
                }

                //Map<Integer, Integer> tmep_map1 = new LinkedHashMap<>();
                //tmep_map1=sortByKey(tmep_map);
                //tmep_map.entrySet().stream().sorted(Comparator.comparing(E -> E.getKey())).forEach((entry) -> tmep_map1.put(entry.getKey(), entry.getValue()));
                //BerkeleyDB_Box box=new BerkeleyDB_Box();
                hashdb.put(null, BerkeleyDB_Box.Get_BDB(keys_.get(i)),
                        BerkeleyDB_Box.Get_BDB_Dictionary(tmep_map));
                alt_keys.add(keys_.get(i));
                dic_gene_loc_count.remove(keys_.get(i));
                //dic_gene_loc_count.put(keys_.get(i),tmep_map);
            }

            hashdb.sync();
            int a = 1111;
            /*                    hashdb.Sync();
             File.AppendAllLines("InputDB\\" + Path.GetFileNameWithoutExtension(file_name) + "_genes.txt", alt_keys);
             //total_lines_processed_till_now += lines_from_bed_file.Count;
             //worker.ReportProgress(total_lines_processed_till_now / count_);
             lines_from_bed_file.Clear();
             if (!reader.EndOfStream)
             {
             lines_from_bed_file.Add(_line_);
             }
             last_Seen_chromosome = new_choromosome;*/
            lines_from_bed_file.clear();
            if (line != null) {
                lines_from_bed_file.add(line);
            }
            Path p = Paths.get(file_name);
            file_name = p.getFileName().toString();

            BufferedWriter output = new BufferedWriter(new FileWriter((Paths
                    .get(read_gene_location, FilenameUtils.removeExtension(file_name) + ".txt").toString()),
                    true));
            for (String alt_key : alt_keys) {
                output.append(alt_key);
                output.newLine();
            }
            output.close();
            /*if (((line = br.readLine()) != null))
            {
            lines_from_bed_file.add(line);
            toks=Arrays.asList(line.split("\t"));
            new_chromosome=toks.get(0).replace("chr", "");
            }*/
            //last_Seen_chromosome=new_chromosome;
        } else if (new_chromosome.equals(last_Seen_chromosome)) {
            lines_from_bed_file.add(line);
        }

    }
    br.close();
    hashdb.sync();
    hashdb.close();

}

From source file:com.github.jackygurui.vertxredissonrepository.repository.Impl.RedisRepositoryImpl.java

private T mapResults(T instance, List results, ArrayList<String> pList)
        throws InstantiationException, IllegalAccessException, IllegalArgumentException {
    pList.parallelStream().forEach(e -> {
        Object o = instance;
        for (String fieldName : e.split("\\.")) {
            BeanMap m = new BeanMap(o);
            Class type = m.getType(fieldName);
            if (isRedisEntity(type)) {
                o = m.get(fieldName);
            } else {
                Object value;
                Object result = results.get(pList.indexOf(e));
                if (result == null) {
                    value = null;
                } else if (String.class.isAssignableFrom(type)) {
                    value = result.toString();
                } else if (type.isEnum()) {
                    try {
                        value = type.getMethod("valueOf", String.class).invoke(null,
                                results.get(pList.indexOf(e)));
                    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
                            | IllegalArgumentException | InvocationTargetException ex) {
                        throw new IllegalArgumentException(ex);
                    }
                } else if (result.getClass().isAssignableFrom(type)) {
                    value = type.cast(result);
                } else {
                    try {
                        value = type.getConstructor(result.getClass()).newInstance(result);
                    } catch (NoSuchMethodException | SecurityException | InstantiationException
                            | IllegalAccessException | IllegalArgumentException
                            | InvocationTargetException ex) {
                        throw new IllegalArgumentException(ex);
                    }
                }
                m.put(fieldName, value);
            }
        }
    });
    return instance;
}

From source file:inflor.knime.nodes.transform.create.TransformNodeModel.java

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
        throws Exception {
    // Create the output spec and data container.
    exec.setProgress(0.01, "Initializing execution");
    DataTableSpec[] outSpecs = createSpecs(inData[0].getSpec());
    BufferedDataContainer container = exec.createDataContainer(outSpecs[0]);
    String columnName = modelSettings.getSelectedColumn();
    int columnIndex = outSpecs[0].findColumnIndex(columnName);

    // Collect the input data.
    exec.setMessage("Reading data");
    ExecutionContext readExec = exec.createSubExecutionContext(0.25);
    ArrayList<FCSFrameFileStoreDataCell> dataSet = new ArrayList<>();
    int rowIndex = 0;
    for (final DataRow inRow : inData[0]) {
        FCSFrameFileStoreDataCell cell = (FCSFrameFileStoreDataCell) inRow.getCell(columnIndex);
        dataSet.add(cell);
        readExec.setProgress((double) rowIndex / inData[0].size(),
                "Reading: " + cell.getFCSFrameMetadata().getDisplayName());
        rowIndex++;
    }

    // create a summary frame containing merged data from all files.
    DataColumnProperties props = inData[0].getSpec().getColumnSpec(columnName).getProperties();
    if (props.containsProperty(FCSUtilities.PROP_KEY_PREVIEW_FRAME)) {
        String summaryString = props.getProperty(FCSUtilities.PROP_KEY_PREVIEW_FRAME);
        summaryFrame = FCSFrame.loadFromProtoString(summaryString);
        if (!modelSettings.getReferenceSubset().equals(TransformNodeSettings.DEFAULT_REFERENCE_SUBSET)) {
            BitSet mask = summaryFrame.getFilteredFrame(modelSettings.getReferenceSubset(), true);
            summaryFrame = FCSUtilities.filterFrame(mask, summaryFrame);
        }
    } else {
        // Filter it down to reference subset
        exec.setMessage("filtering data");
        ExecutionContext filterExec = exec.createSubExecutionContext(0.5);
        String referenceSubset = modelSettings.getReferenceSubset();
        List<FCSFrame> filteredData;
        if (!referenceSubset.equals(TransformNodeSettings.DEFAULT_REFERENCE_SUBSET)) {
            subtaskIndex = 0;
            filteredData = dataSet.parallelStream().map(cell -> filterDataFrame(filterExec,
                    cell.getFCSFrameValue(), referenceSubset, dataSet.size())).collect(Collectors.toList());
        } else {
            filteredData = dataSet.parallelStream().map(cell -> cell.getFCSFrameValue())
                    .collect(Collectors.toList());
        }
        summaryFrame = FCSUtilities.createSummaryFrame(filteredData, 2000);
    }

    // Init and Optimize the transform, record results.
    TransformSet transforms = new TransformSet();
    if (summaryFrame != null) {
        summaryFrame.getDimensionNames()
                .forEach(name -> transforms.addTransformEntry(name, PlotUtils.createDefaultTransform(name)));
    } else {
        throw new CanceledExecutionException("Unable to construct valid data summary.");
    }

    exec.setMessage("Optimizing transforms");
    ExecutionContext optimizeExec = exec.createSubExecutionContext(0.75);
    BufferedDataContainer summaryContainer = exec.createDataContainer(outSpecs[1]);
    subtaskIndex = 0;
    transforms.optimize(summaryFrame);
    transforms.getMap().entrySet().stream().forEach(entry -> optimizeTransform(summaryFrame, entry,
            summaryContainer, optimizeExec, transforms.getMap().size()));
    summaryContainer.close();

    // write the output table.
    exec.setMessage("Writing output");
    ExecutionContext writeExec = exec.createSubExecutionContext(1);
    subtaskIndex = 0;
    for (final DataRow inRow : inData[0]) {
        final DataCell[] outCells = new DataCell[inRow.getNumCells()];
        FCSFrameFileStoreDataCell fileCell = (FCSFrameFileStoreDataCell) inRow.getCell(columnIndex);
        writeExec.setProgress(subtaskIndex / (double) inData[0].size(),
                fileCell.getFCSFrameMetadata().getDisplayName());
        // now create the output row
        FCSFrameMetaData newMetaData = fileCell.getFCSFrameMetadata().copy();

        newMetaData.setTransforms(transforms);

        for (int j = 0; j < outCells.length; j++) {
            if (j == columnIndex) {
                outCells[j] = new FCSFrameFileStoreDataCell(fileCell.getFileStore(), newMetaData);
            } else {
                outCells[j] = inRow.getCell(j);
            }
        }
        final DataRow outRow = new DefaultRow("Row " + subtaskIndex, outCells);
        container.addRowToTable(outRow);
        subtaskIndex++;
    }
    container.close();

    BufferedDataTable table = container.getTable();

    String key = NodeUtilities.KEY_TRANSFORM_MAP;
    String value = transforms.saveToString();
    Map<String, String> newProps = new HashMap<>();
    newProps.put(key, value);
    BufferedDataTable finalTable = NodeUtilities.addPropertyToColumn(exec, table, columnName, newProps);

    return new BufferedDataTable[] { finalTable, summaryContainer.getTable() };
}