Example usage for java.util LinkedList stream

List of usage examples for java.util LinkedList stream

Introduction

On this page you can find example usages of the java.util.LinkedList stream() method.

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
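
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects; the class name LinkedListStreamSketch is illustrative only). Because LinkedList implements Collection, calling stream() yields a sequential Stream over its elements, which can then be filtered, matched, or collected back into another LinkedList.

import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;

public class LinkedListStreamSketch {
    public static void main(String[] args) {
        // LinkedList inherits the default stream() method from Collection
        LinkedList<String> names = new LinkedList<>(List.of("alpha", "beta", "gamma"));

        // Filter and collect back into a new LinkedList
        LinkedList<String> withA = names.stream()
                .filter(s -> s.contains("a"))
                .collect(Collectors.toCollection(LinkedList::new));

        // Terminal operations such as allMatch work on the sequential stream
        boolean allLowerCase = names.stream().allMatch(s -> s.equals(s.toLowerCase()));

        System.out.println(withA + " " + allLowerCase);
    }
}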

Usage

From source file:com.netflix.conductor.core.execution.DeciderService.java

boolean checkForWorkflowCompletion(final WorkflowDef def, final Workflow workflow) throws Exception {

    List<Task> allTasks = workflow.getTasks();
    if (allTasks.isEmpty()) {
        return false;
    }

    Task last = null;
    Map<String, Object> output = new HashMap<>();
    if (!allTasks.isEmpty()) {
        last = allTasks.get(allTasks.size() - 1);
        output = last.getOutputData();
    }
    if (!def.getOutputParameters().isEmpty()) {
        output = getTaskInput(def.getOutputParameters(), workflow, null);
    }
    workflow.setOutput(output);

    Map<String, Status> taskStatusMap = new HashMap<>();
    workflow.getTasks().forEach(task -> taskStatusMap.put(task.getReferenceTaskName(), task.getStatus()));

    LinkedList<WorkflowTask> wftasks = def.getTasks();
    boolean allCompletedSuccessfully = wftasks.stream().parallel().allMatch(wftask -> {
        Status status = taskStatusMap.get(wftask.getTaskReferenceName());
        return status != null && status.isSuccessful() && status.isTerminal();
    });

    boolean noPendingTasks = taskStatusMap.values().stream().allMatch(st -> st.isTerminal());

    boolean noPendingSchedule = workflow.getTasks().stream().parallel().filter(wftask -> {
        String next = getNextTasksToBeScheduled(def, workflow, wftask);
        return next != null && !taskStatusMap.containsKey(next);
    }).collect(Collectors.toList()).isEmpty();

    if (allCompletedSuccessfully && noPendingTasks && noPendingSchedule) {
        return true;
    }

    return false;
}

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Validate the job
 * @param new_analytic_bucket
 * @param jobs
 * @return
 */
public static BasicMessageBean validateJobs(final DataBucketBean new_analytic_bucket,
        final Collection<AnalyticThreadJobBean> jobs) {

    //TODO (ALEPH-63): validate batch enrichment            

    final LinkedList<String> mutable_errs = new LinkedList<>();

    jobs.stream().forEach(job -> {
        if (null == job.config()) {
            mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM, new_analytic_bucket.full_name(),
                    job.name(), "config"));
        } else {
            final SparkTopologyConfigBean config = BeanTemplateUtils
                    .from(job.config(), SparkTopologyConfigBean.class).get();
            if (SparkType.jvm == Optional.ofNullable(config.language()).orElse(SparkType.jvm)) { // JVM validation
                if (null == config.entry_point()) {
                    mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM,
                            new_analytic_bucket.full_name(), job.name(), "config.entry_point"));
                }
            } else if (SparkType.python == Optional.ofNullable(config.language()).orElse(SparkType.jvm)) { // Python validation
                if ((null == config.entry_point()) && (null == config.script())) {
                    mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM,
                            new_analytic_bucket.full_name(), job.name(), "config.entry_point|config.script"));
                }
            }
        }
    });

    return ErrorUtils.buildMessage(mutable_errs.isEmpty(), SparkTechnologyUtils.class, "validateJobs",
            mutable_errs.stream().collect(Collectors.joining(";")));
}

From source file:se.backede.jeconomix.forms.report.SingleTransactionReport.java

public void filter() {

    companyComboBox.setEnabled(false);
    yearComboBox.setEnabled(false);
    monthComboBox.setEnabled(false);

    LinkedList<TransactionDto> allTrasactions = new LinkedList<TransactionDto>(reports.getTransctions());

    CompanyDto company = (CompanyDto) companyComboBox.getSelectedItem();

    String yearString = (String) yearComboBox.getSelectedItem();
    Integer year = Integer.parseInt("0");
    if (yearString != null) {
        if (!yearString.equals(ALL_YEARS)) {
            year = Integer.parseInt(yearString);
        }
    }

    String monthString = (String) monthComboBox.getSelectedItem();
    Month month = null;
    if (monthString != null) {
        if (!monthString.equals(ALL_MONTHS)) {
            month = Month.valueOf(monthString);
        }
    }

    LinkedList<TransactionDto> filteredCompanies = new LinkedList<>(reports.getTransctions());

    if (company != null) {
        if (!company.getName().equals(ALL_COMPANIES)) {
            filteredCompanies = allTrasactions.stream().filter(line -> line.getCompany().equals(company))
                    .collect(Collectors.toCollection(LinkedList::new));
        }
    }

    LinkedList<TransactionDto> filteredByYear = (LinkedList) filteredCompanies.clone();
    if (yearString != null) {
        if (!yearString.equals(ALL_YEARS)) {
            for (TransactionDto filteredTransaction : filteredCompanies) {
                if (!Objects.equals(filteredTransaction.getBudgetYear(), year)) {
                    filteredByYear.remove(filteredTransaction);
                }
            }
        }
    }

    LinkedList<TransactionDto> filteredByMonth = (LinkedList) filteredByYear.clone();
    if (monthString != null) {
        if (!monthString.equals(ALL_MONTHS)) {
            for (TransactionDto filteredTransaction : filteredByYear) {
                if (filteredTransaction.getBudgetMonth() != month) {
                    filteredByMonth.remove(filteredTransaction);
                }
            }
        }
    }

    DefaultTableCellRenderer rightRenderer = new DefaultTableCellRenderer();

    rightRenderer.setHorizontalAlignment(JLabel.RIGHT);

    TransactionCompanyModel transModel = new TransactionCompanyModel(new HashSet<>(filteredByMonth));

    transactionTable.setModel(transModel);

    transactionTable.getColumnModel().getColumn(2).setCellRenderer(rightRenderer);

    transactionSumLabel.setText(transModel.getSum().toString().concat(" Kr"));
    categoryNameLabel.setText(reports.getCategory());

    companyComboBox.setEnabled(true);
    yearComboBox.setEnabled(true);
    monthComboBox.setEnabled(true);

}

From source file:canreg.client.gui.analysis.TableBuilderInternalFrame.java

private void generateTablesAction(FileTypes filetype) {
    boolean filterError = false;

    TableBuilderListElement tble = (TableBuilderListElement) tableTypeList.getSelectedValue();

    if (tble == null) {
        JOptionPane.showMessageDialog(this,
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("NO_TABLE_TYPE_SELECTED"),
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("NO_TABLE_TYPE_SELECTED"),
                JOptionPane.ERROR_MESSAGE);
        return;
    } else {
        try {
            tableBuilder = TableBuilderFactory.getTableBuilder(tble);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    Set<DatabaseVariablesListElement> variables = new LinkedHashSet<>();
    DistributedTableDescription tableDatadescription;
    JChartTableBuilderInterface chartBuilder;

    if (tableBuilder == null) {
        JOptionPane.showMessageDialog(this,
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("TABLE_TYPE_NOT_YET_IMPLEMENTED"),
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("TABLE_TYPE_NOT_YET_IMPLEMENTED"),
                JOptionPane.ERROR_MESSAGE);
        return;
    } else {
        String heading = headerOfTableTextField.getText();
        int startYear = startYearChooser.getValue();
        int endYear = endYearChooser.getValue();
        PopulationDataset[] populations;
        if (dontUsePopulationDatasetCheckBox.isSelected()) {
            populations = generateDummyPopulationDatasets();
        } else {
            populations = getSelectedPopulations();
        }
        PopulationDataset[] standardPopulations = new PopulationDataset[populations.length];
        tableBuilder.setUnknownAgeCode(CanRegClientApp.getApplication().getGlobalToolBox().getUnknownAgeCode());

        if (tableBuilder.areThesePopulationDatasetsCompatible(populations)) {
            String fileName = null;
            // Choose file name;
            if (filetype != FileTypes.jchart) {
                if (chooser == null) {
                    path = localSettings.getProperty(LocalSettings.TABLES_PATH_KEY);
                    if (path == null) {
                        chooser = new JFileChooser();
                    } else {
                        chooser = new JFileChooser(path);
                    }
                }
                int returnVal = chooser.showSaveDialog(this);
                if (returnVal == JFileChooser.APPROVE_OPTION) {
                    try {
                        localSettings.setProperty(LocalSettings.TABLES_PATH_KEY,
                                chooser.getSelectedFile().getParentFile().getCanonicalPath());
                        fileName = chooser.getSelectedFile().getAbsolutePath();
                    } catch (IOException ex) {
                        Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
                    }
                } else {
                    // cancelled
                    return;
                }
            }

            setCursor(hourglassCursor);

            int i = 0;
            String populationFilterString = "";
            for (PopulationDataset pop : populations) {
                if (pop != null) {
                    int stdPopID = pop.getReferencePopulationID();
                    standardPopulations[i++] = populationDatasetsMap.get(stdPopID);
                    if (populationFilterString.trim().length() == 0) {
                        populationFilterString = pop.getFilter();
                    } else if (!populationFilterString.equalsIgnoreCase(pop.getFilter())) {
                        // population filters not matching on all the pds...
                        filterError = true;
                    }
                }
            }

            Globals.StandardVariableNames[] variablesNeeded = tableBuilder.getVariablesNeeded();
            if (variablesNeeded != null) {
                for (Globals.StandardVariableNames standardVariableName : variablesNeeded) {
                    variables.add(canreg.client.CanRegClientApp.getApplication().getGlobalToolBox()
                            .translateStandardVariableNameToDatabaseListElement(
                                    standardVariableName.toString()));
                }
            }
            DatabaseFilter filter = new DatabaseFilter();
            String tableName = Globals.TUMOUR_AND_PATIENT_JOIN_TABLE_NAME;
            String filterString = rangeFilterPanel.getFilter().trim();

            if (filterString.length() != 0) {
                filterString += " AND ";
            }

            if (populationFilterString.length() != 0) {
                filterString += "( " + populationFilterString + " ) AND ";
            }

            // add the years to the filter
            DatabaseVariablesListElement incidenceDate = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.IncidenceDate.toString());
            filterString += incidenceDate.getDatabaseVariableName() + " BETWEEN '" + startYear * 10000
                    + "' AND '" + ((endYear + 1) * 10000 - 1) + "'";

            // filter only the confirmed cases
            DatabaseVariablesListElement recordStatus = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.TumourRecordStatus.toString());
            filterString += " AND " + recordStatus.getDatabaseVariableName() + " = '1'";

            // filter away obsolete cases
            DatabaseVariablesListElement recordObsoleteStatus = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.ObsoleteFlagTumourTable.toString());
            filterString += " AND " + recordObsoleteStatus.getDatabaseVariableName() + " != '1'";

            filter.setFilterString(filterString);

            System.out.println(filterString);

            filter.setQueryType(DatabaseFilter.QueryType.FREQUENCIES_BY_YEAR);
            filter.setDatabaseVariables(variables);
            DistributedTableDataSourceClient tableDataSource;
            Object[][] incidenceData = null;
            try {
                tableDatadescription = canreg.client.CanRegClientApp.getApplication()
                        .getDistributedTableDescription(filter, tableName);
                tableDataSource = new DistributedTableDataSourceClient(tableDatadescription);
                if (tableDatadescription.getRowCount() > 0) {
                    incidenceData = tableDataSource.retrieveRows(0, tableDatadescription.getRowCount());
                } else {
                    // display error - no lines
                    JOptionPane.showMessageDialog(this,
                            "No incidence data available correspondign to the current filter, period and population.",
                            "No incidence data", JOptionPane.ERROR_MESSAGE);
                }

                // Build the table(s)
                LinkedList<String> filesGenerated = tableBuilder.buildTable(heading, fileName, startYear,
                        endYear, incidenceData, populations, standardPopulations, tble.getConfigFields(),
                        tble.getEngineParameters(), filetype);

                if (filetype != FileTypes.jchart) {

                    String filesGeneratedList = new String();
                    filesGeneratedList = filesGenerated.stream().map((fileN) -> "\n" + fileN)
                            .reduce(filesGeneratedList, String::concat);

                    setCursor(normalCursor);

                    // Opening the resulting files if the list is not empty...
                    if (filesGenerated.isEmpty()) {
                        JOptionPane.showMessageDialog(this,
                                "Please use \"View work files\" in the \"File\"-menu to open them",
                                java.util.ResourceBundle.getBundle(
                                        "canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                                        .getString("TABLE(S)_BUILT."),
                                JOptionPane.INFORMATION_MESSAGE);
                    } else {
                        JOptionPane.showMessageDialog(this, filesGeneratedList,
                                java.util.ResourceBundle.getBundle(
                                        "canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                                        .getString("TABLE(S)_BUILT."),
                                JOptionPane.INFORMATION_MESSAGE);
                        filesGenerated.stream().filter((resultFileName) -> (new File(resultFileName).exists()))
                                .forEachOrdered((resultFileName) -> {
                                    try {
                                        canreg.common.Tools.openFile(resultFileName);
                                    } catch (IOException ex) {
                                        JOptionPane.showMessageDialog(this, "Unable to open: " + resultFileName
                                                + "\n" + ex.getLocalizedMessage());
                                        Logger.getLogger(TableBuilderInternalFrame.class.getName())
                                                .log(Level.SEVERE, null, ex);
                                    }
                                });
                    }
                } else {
                    chartBuilder = (JChartTableBuilderInterface) tableBuilder;
                    JFreeChart[] charts = chartBuilder.getCharts();
                    for (JFreeChart chart : charts) {
                        JChartViewerInternalFrame chartViewerInternalFrame = new JChartViewerInternalFrame();
                        chartViewerInternalFrame.setChart(chart);
                        CanRegClientView.showAndPositionInternalFrame(
                                CanRegClientApp.getApplication().getDesktopPane(), chartViewerInternalFrame);
                    }
                    setCursor(normalCursor);
                }

            } catch (SQLException ex) {
                setCursor(normalCursor);
                JOptionPane.showMessageDialog(this,
                        "Something wrong with the SQL query: \n" + ex.getLocalizedMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
            } catch (RemoteException | SecurityException | NotCompatibleDataException
                    | DistributedTableDescriptionException | UnknownTableException ex) {
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
            } catch (TableErrorException ex) {
                setCursor(normalCursor);
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
                JOptionPane.showMessageDialog(this,
                        "Something went wrong while building the table: \n" + ex.getMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
            } finally {
                setCursor(normalCursor);
            }
        } else {
            JOptionPane.showMessageDialog(this,
                    java.util.ResourceBundle
                            .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                            .getString("POPULATION_SET_NOT_COMPATIBLE"),
                    java.util.ResourceBundle
                            .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                            .getString("NO_TABLES_BUILT"),
                    JOptionPane.ERROR_MESSAGE);
        }
    }
}

From source file:uk.ac.ebi.ep.base.search.EnzymeFinder.java

private List<UniprotEntry> computeUniqueEnzymes(List<UniprotEntry> enzymes, String keyword) {
    LinkedList<UniprotEntry> enzymeList = new LinkedList<>();
    LinkedList<UniprotEntry> theEnzymes = new LinkedList<>();
    //Deque<UniprotEntry> enzymeList = new LinkedList<>();
    Set<String> proteinNames = new HashSet<>();
    for (UniprotEntry entry : enzymes) {

        if (!proteinNames.contains(entry.getProteinName())) {

            String enzymeName = HtmlUtility.cleanText(entry.getProteinName()).toLowerCase();
            if (enzymeName.toLowerCase().matches(".*" + keyword.toLowerCase() + ".*")
                    && entry.getEntryType() != 1) {

                enzymeList.offerFirst(entry);

            } else {

                enzymeList.offerLast(entry);

            }

        }

        proteinNames.add(entry.getProteinName());

        computeFilterFacets(entry);

    }

    for (UniprotEntry enzyme : enzymeList) {
        if (HtmlUtility.cleanText(enzyme.getProteinName()).toLowerCase().equalsIgnoreCase(keyword.toLowerCase())
                && enzyme.getEntryType() != 1) {

            LOGGER.info("FOUND A MATCH " + enzyme.getProteinName() + " => " + keyword + " entry type "
                    + enzyme.getEntryType());
            theEnzymes.offerFirst(enzyme);

        } else {
            theEnzymes.offerLast(enzyme);

        }
    }

    return theEnzymes.stream().distinct().collect(Collectors.toList());
}

From source file:oct.analysis.application.comp.EZWorker.java

@Override
protected EZEdgeCoord doInBackground() throws Exception {
    int foveaCenterXPosition = analysisManager.getFoveaCenterXPosition();
    /*
     first get a sharpened version of the OCT and use that to obtain the segmentation
     of the Bruch's membrane. Use a Loess interpolation algorithm to smooth 
     out imperfections in the segmentation line.
     */
    UnivariateInterpolator interpolator = new LoessInterpolator(0.1, 0);
    ArrayList<Point> rawBrmPoints = new ArrayList<>(analysisManager
            .getSegmentation(new SharpenOperation(15, 0.5F)).getSegment(Segmentation.BrM_SEGMENT));
    double[][] brmSeg = Util.getXYArraysFromPoints(rawBrmPoints);
    UnivariateFunction brmInterp = interpolator.interpolate(brmSeg[0], brmSeg[1]);
    BufferedImage sharpOCT = analysisManager.getSharpenedOctImage(8.5D, 1.0F);
    setProgress(10);
    /*
     Starting from the identified location of the fovea, search northward in
     the image until the most northern pixels (in a 3x3 matrix of pixels
     around the search point (X,Y)) are black (i.e. the search matrix has
     found that the search point isn't totally surrounded by white pixels).
     Then a recursive search algorithm determines if the black area signifies
     the separation between bands or simply represents a closed (a black blob
     entirely surrounded by white pixels) black band. It will continue
     searching northward in the image until it can find an open region of all
     black pixels. Once this is found it will find the contour of the edge
     between the black and white pixels along the width of the image.
     */
    int searchY = (int) Math.round(brmInterp.value(foveaCenterXPosition)) + 1;
    do {
        searchY--;
    } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) > 0
            || !isContrastPoint(foveaCenterXPosition, searchY, sharpOCT));
    LinkedList<Point> contour = new LinkedList<>();
    Point startPoint = new Point(foveaCenterXPosition, searchY);
    //find contour by searching for white pixel boundary to the right of the fovea
    contour.add(findContourRight(startPoint, Cardinality.SOUTH, startPoint, Cardinality.SOUTH, contour,
            sharpOCT, 0));
    //search until open black area found (i.e. if the search algorithm arrives back at
    //the starting pixel keep moving north to the next black area to search)
    while (contour.get(0).equals(startPoint)) {
        contour = new LinkedList<>();
        do {
            searchY--;
        } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) == 0);
        do {
            searchY--;
        } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) > 0
                || isSurroundedByWhite(foveaCenterXPosition, searchY, sharpOCT));
        startPoint = new Point(foveaCenterXPosition, searchY);
        contour.add(findContourRight(startPoint, Cardinality.SOUTH, startPoint, Cardinality.SOUTH, contour,
                sharpOCT, 0));
    }
    setProgress(20);
    //open black space found, complete contour to left of fovea
    contour.add(
            findContourLeft(startPoint, Cardinality.SOUTH, startPoint, Cardinality.SOUTH, contour, sharpOCT));
    analysisManager.getImgPanel().setDrawPoint(new Point(foveaCenterXPosition, searchY));
    setProgress(30);
    /*
     since the contour can snake around due to aberrations and low image density
     we need to create a single line (represented by points) from left to right
     to represent the contour. This is easily done by building a line of
     points consisting of the point with the largest Y value (furthest from
     the top of the image) at each X value. This eliminates overhangs from the
     contour line.
     */
    Map<Double, List<Point>> grouped = contour.stream().collect(Collectors.groupingBy(Point::getX));
    List<Point> refinedEZContour = grouped.values().stream().map((List<Point> points) -> {
        int maxY = points.stream().mapToInt((Point p) -> p.y).min().getAsInt();
        return new Point(points.get(0).x, maxY);
    }).sorted((Point p1, Point p2) -> Integer.compare(p1.x, p2.x)).collect(Collectors.toList());
    setProgress(35);
    /*
     Starting from the identified location of the fovea search southward in 
     the image until the most southern pixels (in a 3x3 matrix of 
     pixels around the search point (X,Y)) are black (i.e. the search
     matrix has found that the search point isn't totally surrounded by
     white pixels). Then a recursive search algorithm determines if the 
     black area signifies the bottom of the Bruch's membrane or simply represents
     a closed (a black blob entirely surrounded by white pixels) black band.
     It will continue searching southward in the image until it can find an 
     open region of all black pixels. Once this is found it will find the contour
     of the edge between the black and white pixels, along the width of the image,
     of the bottom of the Bruch's membrane.
     */
    //        sharpOCT = getSharpenedOctImage(5D, 1.0F);
    searchY = (int) Math.round(brmInterp.value(foveaCenterXPosition));
    do {
        searchY++;
    } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) > 0
            || isSurroundedByWhite(foveaCenterXPosition, searchY, sharpOCT));
    contour = new LinkedList<>();
    startPoint = new Point(foveaCenterXPosition, searchY);
    /*
     Find contour by searching for white pixel boundary to the right of the fovea.
     Sometimes the noise below the Bruch's membrane causes too much interference for the
     algorithm to work properly, so we must tweak some of the parameters of the
     sharpening performed on the image until the algorithm succeeds or we can no longer
     tweak parameters. In the latter case we can use the raw segmented
     Bruch's membrane as a substitute to keep the method from failing.
     */
    contour.add(findContourRight(startPoint, Cardinality.NORTH, startPoint, Cardinality.NORTH, contour,
            sharpOCT, 0));
    double filtValue = 8.5D;
    boolean tweakFailed = false;
    while (contour.contains(null)) {
        contour = new LinkedList<>();
        filtValue -= 0.5D;
        System.out.println("Reducing sigma to " + filtValue);
        if (filtValue <= 0D) {
            tweakFailed = true;
            break;
        }
        sharpOCT = analysisManager.getSharpenedOctImage(8.5D, 1.0F);
        contour.add(findContourRight(startPoint, Cardinality.NORTH, startPoint, Cardinality.NORTH, contour,
                sharpOCT, 0));
    }

    if (tweakFailed) {
        contour = new LinkedList<>(rawBrmPoints);
    } else {
        //search until open black area found (i.e. if the search algorithm arrives back at
        //the starting pixel keep moving south to the next black area to search)
        while (contour.get(0).equals(startPoint)) {
            contour = new LinkedList<>();
            do {
                searchY++;
            } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) == 0);
            do {
                searchY++;
            } while (Util.calculateGrayScaleValue(sharpOCT.getRGB(foveaCenterXPosition, searchY)) > 0
                    || isSurroundedByWhite(foveaCenterXPosition, searchY, sharpOCT));
            startPoint = new Point(foveaCenterXPosition, searchY);
            contour.add(findContourRight(startPoint, Cardinality.NORTH, startPoint, Cardinality.NORTH, contour,
                    sharpOCT, 0));
        }
        setProgress(45);
        //open black space found, complete contour to left of fovea
        contour.add(findContourLeft(startPoint, Cardinality.NORTH, startPoint, Cardinality.NORTH, contour,
                sharpOCT));
    }
    setProgress(55);
    /*
     since the contour can snake around due to aberrations and low image density
     we need to create a single line (represented by points) from left to right
     to represent the contour. This is easily done by building a line of
     points consisting of the point with the smallest Y value (closest to
     the top of the image) at each X value. This eliminates overhangs from the
     contour line.
     */
    grouped = contour.stream().collect(Collectors.groupingBy(Point::getX));
    List<Point> refinedBruchsMembraneContour = grouped.values().stream().map((List<Point> points) -> {
        int minY = points.stream().mapToInt((Point p) -> p.y).min().getAsInt();
        return new Point(points.get(0).x, minY);
    }).sorted((Point p1, Point p2) -> Integer.compare(p1.x, p2.x)).collect(Collectors.toList());
    setProgress(70);

    /*
     use a Loess interpolator again to smooth the new contours of the EZ and Bruch's Membrane
     */
    double[][] refinedContourPoints = Util.getXYArraysFromPoints(refinedEZContour);
    UnivariateFunction interpEZContour = interpolator.interpolate(refinedContourPoints[0],
            refinedContourPoints[1]);
    refinedContourPoints = Util.getXYArraysFromPoints(refinedBruchsMembraneContour);
    UnivariateFunction interpBruchsContour = interpolator.interpolate(refinedContourPoints[0],
            refinedContourPoints[1]);

    /*
     find the average difference in the distance in the Y between the 10 pixels
     at each end of the Bruch's Membrane contour and the contour created
     along the top of the EZ.
     */
    //since the lines are sorted on X position it is easy to align the lines
    //based on the tails of each line
    int minX = refinedEZContour.get(0).x;
    int maxX;
    //the interpolator can shorten the range of the X values from the original supplied
    //so we need to test where the end of the range occurs since it isn't directly accessible
    for (maxX = refinedEZContour.get(refinedEZContour.size() - 1).x; maxX > minX; maxX--) {
        try {
            double tmp = interpEZContour.value(maxX) - interpBruchsContour.value(maxX);
            //if this break is reached we have found the max value the interpolators will allow
            break;
        } catch (OutOfRangeException oe) {
            //do nothing but let loop continue
        }
    }
    double avgDif = Stream
            .concat(IntStream.range(minX + 30, minX + 50).boxed(),
                    IntStream.range(maxX - 49, maxX - 28).boxed())
            .mapToDouble(x -> interpBruchsContour.value(x) - interpEZContour.value(x)).average().getAsDouble();

    int height = sharpOCT.getHeight(); //local copy, effectively final for use in the lambda expressions below
    List<LinePoint> ezLine = IntStream.rangeClosed(minX, maxX)
            .mapToObj(x -> new LinePoint(x, height - interpEZContour.value(x) - avgDif))
            .collect(Collectors.toList());
    List<LinePoint> bmLine = IntStream.rangeClosed(minX, maxX)
            .mapToObj(x -> new LinePoint(x, height - interpBruchsContour.value(x)))
            .collect(Collectors.toList());
    List<LinePoint> bmUnfiltLine = refinedBruchsMembraneContour.stream()
            .map((Point p) -> new LinePoint(p.x, height - p.getY())).collect(Collectors.toList());
    Util.graphPoints(ezLine, bmLine, bmUnfiltLine);
    analysisManager.getImgPanel().setDrawnLines(
            IntStream.rangeClosed(minX, maxX).mapToObj(x -> new LinePoint(x, interpEZContour.value(x)))
                    .collect(Collectors.toList()),
            IntStream.rangeClosed(minX, maxX).mapToObj(x -> new LinePoint(x, interpBruchsContour.value(x)))
                    .collect(Collectors.toList()));
    /*
     Find the difference between the two contours (Bruch's membrane and the
     EZ + Bruch's membrane) and use this to determine where the edge of the
     EZ is
     */
    List<LinePoint> diffLine = findDiffWithAdjustment(interpBruchsContour, 0D, interpEZContour, avgDif, minX,
            maxX);
    setProgress(90);
    //        List<LinePoint> peaks = Util.findPeaksAndVallies(diffLine);
    //        Util.graphPoints(diffLine, peaks);

    /*
     Find the first zero crossings of the difference line on both sides of the fovea.
     If a zero crossing can't be found then search for the first crossing of a
     value of 1, then 2, then 3, etc. until an X coordinate of a crossing is
     found on each side of the fovea.
     */
    OptionalInt ezLeftEdge;
    double crossingThreshold = 0.25D;
    do {
        double filtThresh = crossingThreshold;
        System.out.println("Crossing threshold = " + crossingThreshold);
        ezLeftEdge = diffLine.stream().filter(lp -> lp.getY() <= filtThresh && lp.getX() < foveaCenterXPosition)
                .mapToInt(LinePoint::getX).max();
        crossingThreshold += 0.25D;
    } while (!ezLeftEdge.isPresent());
    OptionalInt ezRightEdge;
    crossingThreshold = 0.25D;
    do {
        double filtThresh = crossingThreshold;
        System.out.println("Crossing threshold = " + crossingThreshold);
        ezRightEdge = diffLine.stream()
                .filter(lp -> lp.getY() <= filtThresh && lp.getX() > foveaCenterXPosition)
                .mapToInt(LinePoint::getX).min();
        crossingThreshold += 0.25D;
    } while (!ezRightEdge.isPresent());
    //return findings
    return new EZEdgeCoord(ezLeftEdge.getAsInt(), ezRightEdge.getAsInt());
}

From source file:ddf.catalog.transformer.input.pdf.GeoPdfParserImpl.java

/**
 * Generates a WKT compliant String from a PDF Document if it contains GeoPDF information.
 * Currently, only WGS84 Projections are supported (GEOGRAPHIC GeoPDF ProjectionType).
 *
 * @param pdfDocument - The PDF document
 * @return the WKT String
 * @throws IOException
 */
@Override
public String apply(PDDocument pdfDocument) throws IOException {
    ToDoubleVisitor toDoubleVisitor = new ToDoubleVisitor();
    LinkedList<String> polygons = new LinkedList<>();

    for (PDPage pdPage : pdfDocument.getPages()) {
        COSDictionary cosObject = pdPage.getCOSObject();

        COSBase lgiDictObject = cosObject.getObjectFromPath(LGIDICT);

        // Handle Multiple Map Frames
        if (lgiDictObject instanceof COSArray) {
            for (int i = 0; i < ((COSArray) lgiDictObject).size(); i++) {
                COSDictionary lgidict = (COSDictionary) cosObject.getObjectFromPath(LGIDICT + "/[" + i + "]");

                COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
                if (projectionArray != null) {
                    String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                    if (GEOGRAPHIC.equals(projectionType)) {
                        COSArray neatlineArray = (COSArray) cosObject
                                .getObjectFromPath(LGIDICT + "/[" + i + "]/" + NEATLINE);
                        getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor).ifPresent(polygons::add);
                    } else {
                        LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.",
                                projectionType);
                    }
                } else {
                    LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
                }
            }
            // Handle One Map Frame
        } else if (lgiDictObject instanceof COSDictionary) {
            COSDictionary lgidict = (COSDictionary) lgiDictObject;
            COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
            if (projectionArray != null) {
                String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                if (GEOGRAPHIC.equals(projectionType)) {
                    COSArray neatlineArray = (COSArray) cosObject.getObjectFromPath(LGIDICT + "/" + NEATLINE);
                    if (neatlineArray == null) {
                        neatlineArray = generateNeatLineFromPDFDimensions(pdPage);
                    }

                    getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor).ifPresent(polygons::add);
                } else {
                    LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.", projectionType);
                }
            } else {
                LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
            }
        }
    }

    if (polygons.size() == 0) {
        LOGGER.debug(
                "No GeoPDF information found on PDF during transformation.  Metacard location will not be set.");
        return null;
    }

    if (polygons.size() == 1) {
        return POLYGON + polygons.get(0) + "))";
    } else {
        return polygons.stream().map(polygon -> "((" + polygon + "))")
                .collect(Collectors.joining(",", MULTIPOLYGON, ")"));
    }
}
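
As a usage note (not part of the DDF source above): the following is a minimal driver sketch showing how such a transformer might be exercised with PDFBox 2.x. PDDocument.load comes from PDFBox; the no-argument GeoPdfParserImpl constructor is an assumption made purely for illustration.

import java.io.File;
import java.io.IOException;
import org.apache.pdfbox.pdmodel.PDDocument;

public class GeoPdfWktSketch {
    public static void main(String[] args) throws IOException {
        // Load the PDF with PDFBox 2.x and hand it to the parser from the listing above
        try (PDDocument pdfDocument = PDDocument.load(new File(args[0]))) {
            GeoPdfParserImpl parser = new GeoPdfParserImpl(); // assumed no-arg constructor
            String wkt = parser.apply(pdfDocument);           // returns null when no GeoPDF info is present
            System.out.println(wkt != null ? wkt : "No GeoPDF information found");
        }
    }
}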

From source file:ddf.catalog.transformer.input.pdf.GeoPdfParser.java

/**
 * Generates a WKT compliant String from a PDF Document if it contains GeoPDF information.
 * Currently, only WGS84 Projections are supported (GEOGRAPHIC GeoPDF ProjectionType).
 *
 * @param pdfDocument - The PDF document
 * @return the WKT String
 * @throws IOException
 */
public String getWktFromPDF(PDDocument pdfDocument) throws IOException {
    ToDoubleVisitor toDoubleVisitor = new ToDoubleVisitor();
    LinkedList<String> polygons = new LinkedList<>();

    for (PDPage pdPage : pdfDocument.getPages()) {
        COSDictionary cosObject = pdPage.getCOSObject();

        COSBase lgiDictObject = cosObject.getObjectFromPath(LGIDICT);

        // Handle Multiple Map Frames
        if (lgiDictObject instanceof COSArray) {
            for (int i = 0; i < ((COSArray) lgiDictObject).size(); i++) {
                COSDictionary lgidict = (COSDictionary) cosObject.getObjectFromPath(LGIDICT + "/[" + i + "]");

                COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
                if (projectionArray != null) {
                    String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                    if (GEOGRAPHIC.equals(projectionType)) {
                        COSArray neatlineArray = (COSArray) cosObject
                                .getObjectFromPath(LGIDICT + "/[" + i + "]/" + NEATLINE);
                        String wktString = getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor);
                        polygons.add(wktString);
                    } else {
                        LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.",
                                projectionType);
                    }
                } else {
                    LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
                }
            }
            // Handle One Map Frame
        } else if (lgiDictObject instanceof COSDictionary) {
            COSDictionary lgidict = (COSDictionary) lgiDictObject;
            COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
            if (projectionArray != null) {
                String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                if (GEOGRAPHIC.equals(projectionType)) {
                    COSArray neatlineArray = (COSArray) cosObject.getObjectFromPath(LGIDICT + "/" + NEATLINE);
                    if (neatlineArray == null) {
                        neatlineArray = generateNeatLineFromPDFDimensions(pdPage);

                    }
                    polygons.add(getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor));

                } else {
                    LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.", projectionType);
                }
            } else {
                LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
            }
        }
    }

    if (polygons.size() == 0) {
        LOGGER.debug(
                "No GeoPDF information found on PDF during transformation.  Metacard location will not be set.");
        return null;
    }

    if (polygons.size() == 1) {
        return POLYGON + polygons.get(0) + "))";
    } else {
        return polygons.stream().map(polygon -> "((" + polygon + "))")
                .collect(Collectors.joining(",", MULTIPOLYGON, ")"));
    }
}

From source file:com.ikanow.aleph2.enrichment.utils.services.TestJsScriptEngineService.java

public void test_end2end(final String js_name) throws IOException {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final String user_script = Resources.toString(Resources.getResource(js_name), Charsets.UTF_8);

    final JsScriptEngineService service_under_test = new JsScriptEngineService();

    final DataBucketBean bucket = Mockito.mock(DataBucketBean.class);
    //final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class);

    final LinkedList<ObjectNode> emitted = new LinkedList<>();
    final LinkedList<JsonNode> grouped = new LinkedList<>();
    final LinkedList<JsonNode> externally_emitted = new LinkedList<>();

    final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class, new Answer<Void>() {
        @SuppressWarnings("unchecked")
        public Void answer(InvocationOnMock invocation) {
            try {
                Object[] args = invocation.getArguments();
                assertTrue("Unexpected call to context object during test: " + invocation.getMethod().getName(),
                        invocation.getMethod().getName().equals("emitMutableObject")
                                || invocation.getMethod().getName().equals("externalEmit")
                                || invocation.getMethod().getName().equals("getLogger"));
                if (invocation.getMethod().getName().equals("emitMutableObject")) {
                    final Optional<JsonNode> grouping = (Optional<JsonNode>) args[3];
                    if (grouping.isPresent()) {
                        grouped.add(grouping.get());
                    }
                    emitted.add((ObjectNode) args[1]);
                } else if (invocation.getMethod().getName().equals("externalEmit")) {
                    final DataBucketBean to = (DataBucketBean) args[0];
                    final Either<JsonNode, Map<String, Object>> out = (Either<JsonNode, Map<String, Object>>) args[1];
                    externally_emitted.add(((ObjectNode) out.left().value()).put("bucket", to.full_name()));
                }
            } catch (Exception e) {
                fail(e.getMessage());
            }
            return null;
        }
    });

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::config,
                    new LinkedHashMap<String, Object>(ImmutableMap.<String, Object>builder()
                            .put("script", user_script)
                            .put("config", ImmutableMap.<String, Object>builder().put("test", "config").build())
                            .put("imports", Arrays.asList("underscore-min.js")).build()))
            .done().get();

    service_under_test.onStageInitialize(context, bucket, control,
            Tuples._2T(ProcessingStage.batch, ProcessingStage.grouping),
            Optional.of(Arrays.asList("test1", "test2")));

    final List<Tuple2<Long, IBatchRecord>> batch = Arrays
            .asList(new BatchRecord(mapper.readTree("{\"test\":\"1\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"2\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"3\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"4\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"5\"}")))
            .stream().<Tuple2<Long, IBatchRecord>>map(br -> Tuples._2T(0L, br)).collect(Collectors.toList());

    service_under_test.onObjectBatch(batch.stream(), Optional.of(5),
            Optional.of(mapper.readTree("{\"key\":\"static\"}")));
    assertEquals(20, emitted.size());
    emitted.stream().forEach(on -> {
        if (on.has("len"))
            assertEquals(5, on.get("len").asInt());
        else if (on.has("grouping_key"))
            assertEquals("{\"key\":\"static\"}", on.get("grouping_key").toString());
        else if (on.has("prev")) {
            assertEquals("batch", on.get("prev").asText());
            assertEquals("grouping", on.get("next").asText());
            assertEquals("{\"test\":\"config\"}", on.get("config").toString());
            assertEquals(2, on.get("groups").size());
            //DEBUG
            //System.out.println(on.toString());
        } else {
            fail("missing field" + on.toString());
        }
    });

    assertEquals(5, grouped.size());
    assertTrue(grouped.stream().map(j -> j.toString()).allMatch(s -> s.equals("{\"key\":\"static\"}")));
    assertEquals(5, externally_emitted.size());

    // Finally, check cloning

    final IEnrichmentBatchModule service_under_test_2 = service_under_test.cloneForNewGrouping();

    final List<Tuple2<Long, IBatchRecord>> batch2 = Arrays
            .asList(new BatchRecord(mapper.readTree("{\"test\":\"1\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"2\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"3\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"4\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"5\"}")))
            .stream().<Tuple2<Long, IBatchRecord>>map(br -> Tuples._2T(0L, br)).collect(Collectors.toList());

    emitted.clear();
    assertEquals(0, emitted.size());
    service_under_test_2.onObjectBatch(batch2.stream(), Optional.empty(), Optional.empty());
    assertEquals(20, emitted.size());
    emitted.stream().forEach(on -> {
        //DEBUG
        //System.out.println(on.toString());

        assertFalse("Wrong format: " + on.toString(), on.has("len"));
        assertFalse("Wrong format: " + on.toString(), on.has("grouping_key"));
        if (on.has("prev")) {
            assertEquals("batch", on.get("prev").asText());
            assertEquals("grouping", on.get("next").asText());
            assertEquals("{\"test\":\"config\"}", on.get("config").toString());
            assertEquals(2, on.get("groups").size());
        }
    });

}