Example usage for java.util TreeSet add

List of usage examples for java.util TreeSet add

Introduction

On this page you can find usage examples for java.util TreeSet add.

Prototype

public boolean add(E e) 

Document

Adds the specified element to this set if it is not already present.
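
A minimal, self-contained sketch of this behaviour (not taken from any of the source files below): add returns true when the element is inserted and false when an equal element is already present, and iteration follows the set's sort order.

import java.util.TreeSet;

public class TreeSetAddExample {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<String>();

        // add returns true when the element was not yet present
        System.out.println(set.add("banana")); // true
        System.out.println(set.add("apple"));  // true

        // adding a duplicate leaves the set unchanged and returns false
        System.out.println(set.add("apple"));  // false

        // elements are iterated in their natural (sorted) order
        System.out.println(set);               // [apple, banana]
    }
}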

Usage

From source file:guineu.modules.filter.Alignment.RANSACGCGC.RansacGCGCAlignerTask.java

/**
 *
 * @param peakList
 * @return
 */
private HashMap<PeakListRow, PeakListRow> getAlignmentMap(Dataset peakList) {

    // Create a table of mappings for best scores
    HashMap<PeakListRow, PeakListRow> alignmentMapping = new HashMap<PeakListRow, PeakListRow>();

    if (alignedPeakList.getNumberRows() < 1) {
        return alignmentMapping;
    }

    // Create a sorted set of scores matching
    TreeSet<RowVsRowGCGCScore> scoreSet = new TreeSet<RowVsRowGCGCScore>();

    // RANSAC algorithm
    List<AlignGCGCStructMol> list = ransacPeakLists(alignedPeakList, peakList);
    PolynomialFunction function = this.getPolynomialFunction(list,
            ((SimpleGCGCDataset) alignedPeakList).getRowsRTRange());

    PeakListRow allRows[] = peakList.getRows().toArray(new PeakListRow[0]);

    for (PeakListRow row : allRows) {
        double rt = 0;
        if (!this.useOnlyRTI) {
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRT1());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRT1();
                }
            } catch (Exception ee) {
            }
        } else {
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRTI());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRTI();
                }
            } catch (Exception ee) {
            }
        }
        PeakListRow candidateRows[] = null;
        if (!this.useOnlyRTI) {
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            Range RT1Range = this.rtToleranceAfterRTcorrection.getToleranceRange(rt);
            Range RT2Range = this.rt2Tolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRT2());
            // Get all rows of the aligned peaklist within parameter limits
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RT1Range,
                    RT2Range, RTIRange);
        } else {
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RTIRange);
        }
        for (PeakListRow candidate : candidateRows) {
            RowVsRowGCGCScore score;
            try {
                score = new RowVsRowGCGCScore(row, candidate, rtiTolerance.getTolerance(),
                        rtToleranceAfterRTcorrection.getTolerance(), rt);

                scoreSet.add(score);
                errorMessage = score.getErrorMessage();

            } catch (Exception e) {
                e.printStackTrace();
                setStatus(TaskStatus.ERROR);
                return null;
            }
        }
        progress = (double) processedRows++ / (double) totalRows;
    }

    // Iterate scores by descending order
    Iterator<RowVsRowGCGCScore> scoreIterator = scoreSet.iterator();
    while (scoreIterator.hasNext()) {

        RowVsRowGCGCScore score = scoreIterator.next();

        // Check if the row is already mapped
        if (alignmentMapping.containsKey(score.getPeakListRow())) {
            continue;
        }

        // Check if the spectra score is unacceptable
        if (score.score == -10) {
            continue;
        }

        // Check if the aligned row is already filled
        if (alignmentMapping.containsValue(score.getAlignedRow())) {
            continue;
        }

        alignmentMapping.put(score.getPeakListRow(), score.getAlignedRow());

    }

    return alignmentMapping;
}

From source file:edu.ku.brc.specify.config.FixDBAfterLogin.java

/**
 *
 */
public static void fixUserPermissions(final boolean doSilently) {
    final String FIXED_USER_PERMS = "FIXED_USER_PERMS";
    boolean isAlreadyFixed = AppPreferences.getRemote().getBoolean(FIXED_USER_PERMS, false);
    if (isAlreadyFixed) {
        return;
    }

    String whereStr = " WHERE p.GroupSubClass = 'edu.ku.brc.af.auth.specify.principal.UserPrincipal' ";
    String whereStr2 = "AND p.userGroupScopeID IS NULL";

    String postSQL = " FROM specifyuser su "
            + "INNER JOIN specifyuser_spprincipal ss ON su.SpecifyUserID = ss.SpecifyUserID "
            + "INNER JOIN spprincipal p ON ss.SpPrincipalID = p.SpPrincipalID "
            + "LEFT JOIN spprincipal_sppermission pp ON p.SpPrincipalID = pp.SpPrincipalID "
            + "LEFT OUTER JOIN sppermission pm ON pp.SpPermissionID = pm.SpPermissionID " + whereStr;

    String sql = "SELECT COUNT(*)" + postSQL + whereStr2;
    log.debug(sql);
    if (BasicSQLUtils.getCountAsInt(sql) < 1) {
        sql = "SELECT COUNT(*)" + postSQL;
        log.debug(sql);
        if (BasicSQLUtils.getCountAsInt(sql) > 0) {
            return;
        }
    }

    final String updatePermSQL = "DELETE FROM %s WHERE SpPermissionID = %d";
    final String updatePrinSQL = "DELETE FROM %s WHERE SpPrincipalID = %d";

    sql = "SELECT p.SpPrincipalID, pp.SpPermissionID" + postSQL;
    log.debug(sql);

    HashSet<Integer> prinIds = new HashSet<Integer>();
    for (Object[] row : query(sql)) {
        Integer prinId = (Integer) row[0];
        if (prinId != null) {
            prinIds.add(prinId);
        }

        Integer permId = (Integer) row[1];
        if (permId != null) {
            update(String.format(updatePermSQL, "spprincipal_sppermission", permId));
            update(String.format(updatePermSQL, "sppermission", permId));
            log.debug("Removing PermId: " + permId);
        }
    }

    StringBuilder sb1 = new StringBuilder();
    for (Integer prinId : prinIds) {
        update(String.format(updatePrinSQL, "specifyuser_spprincipal", prinId));
        update(String.format(updatePrinSQL, "spprincipal", prinId));
        log.debug("Removing PrinId: " + prinId);
        if (sb1.length() > 0)
            sb1.append(",");
        sb1.append(prinId.toString());
    }
    log.debug("(" + sb1.toString() + ")");

    // Create all the necessary UserPrincipal records
    // Start by figuring out what groups there are and then create one UserPrincipal record
    // for each one

    TreeSet<String> nameSet = new TreeSet<String>();
    sql = "SELECT su.Name, su.SpecifyUserID, p.userGroupScopeID, p.SpPrincipalID FROM specifyuser su "
            + "INNER JOIN specifyuser_spprincipal sp ON su.SpecifyUserID = sp.SpecifyUserID "
            + "INNER JOIN spprincipal p ON sp.SpPrincipalID = p.SpPrincipalID "
            + "WHERE p.GroupSubClass = 'edu.ku.brc.af.auth.specify.principal.GroupPrincipal'";

    String fields = "TimestampCreated, TimestampModified, Version, GroupSubClass, groupType, Name, Priority, Remarks, userGroupScopeID, CreatedByAgentID, ModifiedByAgentID";
    String insertSQL = "INSERT INTO spprincipal (" + fields + ") VALUES(?,?,?,?,?,?,?,?,?,?,?)";
    String insertSQL2 = "INSERT INTO specifyuser_spprincipal (SpecifyUserID, SpPrincipalID) VALUES(?,?)";

    String searchSql = "SELECT " + fields + " FROM spprincipal WHERE SpPrincipalID = ?";

    sb1 = new StringBuilder();

    PreparedStatement selStmt = null;
    PreparedStatement pStmt = null;
    PreparedStatement pStmt2 = null;
    try {
        Connection conn = DBConnection.getInstance().getConnection();

        pStmt = conn.prepareStatement(insertSQL, Statement.RETURN_GENERATED_KEYS);
        pStmt2 = conn.prepareStatement(insertSQL2);
        selStmt = conn.prepareStatement(searchSql);

        String adtSQL = "SELECT DISTINCT ca.AgentID FROM specifyuser AS su INNER Join agent AS ca ON su.CreatedByAgentID = ca.AgentID";
        Integer createdById = BasicSQLUtils.getCount(conn, adtSQL);
        if (createdById == null) {
            createdById = BasicSQLUtils.getCount(conn,
                    "SELECT AgentID FROM agent ORDER BY AgentID ASC LIMIT 0,1");
            if (createdById == null) {
                UIRegistry.showError("The permissions could not be fixed because there were no agents.");
                AppPreferences.shutdownAllPrefs();
                DBConnection.shutdownFinalConnection(true, true);
                return;
            }
        }

        for (Object[] row : query(sql)) {
            String usrName = (String) row[0];
            Integer userId = (Integer) row[1];
            Integer collId = (Integer) row[2];
            Integer prinId = (Integer) row[3];

            nameSet.add(usrName);

            log.debug("usrName: " + usrName + "  prinId: " + prinId);
            if (sb1.length() > 0)
                sb1.append(",");
            sb1.append(prinId.toString());

            selStmt.setInt(1, prinId);
            ResultSet rs = selStmt.executeQuery();
            if (rs.next()) {
                log.debug(String.format("%s - adding UserPrincipal for Collection  %d / %d", usrName,
                        rs.getInt(9), collId));
                Integer createdByAgentID = (Integer) rs.getObject(10);
                Integer modifiedByAgentID = (Integer) rs.getObject(11);

                pStmt.setTimestamp(1, rs.getTimestamp(1));
                pStmt.setTimestamp(2, rs.getTimestamp(2));
                pStmt.setInt(3, 1); // Version
                pStmt.setString(4, "edu.ku.brc.af.auth.specify.principal.UserPrincipal"); // GroupSubClass
                pStmt.setString(5, null); // groupType
                pStmt.setString(6, rs.getString(6)); // Name
                pStmt.setInt(7, 80); // Priority
                pStmt.setString(8, rs.getString(8)); // Remarks
                pStmt.setInt(9, rs.getInt(9)); // userGroupScopeID
                pStmt.setInt(10, createdByAgentID != null ? createdByAgentID : createdById);
                pStmt.setInt(11, modifiedByAgentID != null ? modifiedByAgentID : createdById);

                // Create UserPrincipal
                pStmt.executeUpdate();

                int newPrinId = BasicSQLUtils.getInsertedId(pStmt);

                // Join the new Principal to the SpecifyUser record
                pStmt2.setInt(1, userId);
                pStmt2.setInt(2, newPrinId);
                pStmt2.executeUpdate();

            } else {
                // error
            }
            rs.close();
        }

        log.debug("(" + sb1.toString() + ")");

        AppPreferences.getRemote().putBoolean(FIXED_USER_PERMS, true);

    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        try {
            if (pStmt != null)
                pStmt.close();
            if (pStmt2 != null)
                pStmt2.close();
            if (selStmt != null)
                selStmt.close();
        } catch (Exception ex) {
        }
    }

    final StringBuilder sb = new StringBuilder();
    for (String nm : nameSet) {
        if (sb.length() > 0)
            sb.append('\n');
        sb.append(nm);
    }

    if (!doSilently) {
        JTextArea ta = UIHelper.createTextArea(15, 30);
        ta.setText(sb.toString());
        ta.setEditable(false);

        JEditorPane htmlPane = new JEditorPane("text/html", //$NON-NLS-1$
                UIRegistry.getResourceString("FDBAL_PERMFIXEDDESC"));
        htmlPane.setEditable(false);
        htmlPane.setOpaque(false);

        CellConstraints cc = new CellConstraints();
        PanelBuilder pb = new PanelBuilder(new FormLayout("f:p:g", "p:g,8px,f:p:g"));
        pb.add(htmlPane, cc.xy(1, 1));
        pb.add(UIHelper.createScrollPane(ta), cc.xy(1, 3));
        pb.setDefaultDialogBorder();

        CustomDialog dlg = new CustomDialog((Frame) UIRegistry.getMostRecentWindow(),
                UIRegistry.getResourceString("FDBAL_PERMFIXED"), true, CustomDialog.OK_BTN, pb.getPanel());
        dlg.setOkLabel(UIRegistry.getResourceString("CLOSE"));
        UIHelper.centerAndShow(dlg);
    }
}

From source file:elh.eus.absa.Features.java

/**
 *     POS tag feature extraction from a KAF document.
 * 
 * @param KAFDocument kafDoc : POS-tagged KAF document to extract tags from.
 * @param boolean save : save the features to file or not.
 * @return TreeSet<String> containing the POS tag features added.
 */
public TreeSet<String> extractPOStags(KAFDocument kafDoc, boolean save) {
    TreeSet<String> result = new TreeSet<String>();
    for (Term term : kafDoc.getTerms()) {
        String pos = "POS_" + term.getPos();
        // for good measure, test that the POS tag is not already included as a feature
        if (!getAttIndexes().containsKey(pos)) {
            addNumericFeature(pos);
            result.add(pos);
        }

    }
    return result;
}

From source file:net.java.sip.communicator.impl.history.HistoryReaderImpl.java

private QueryResultSet<HistoryRecord> find(Date startDate, Date endDate, String[] keywords, String field,
        boolean caseSensitive) {
    TreeSet<HistoryRecord> result = new TreeSet<HistoryRecord>(new HistoryRecordComparator());

    Vector<String> filelist = filterFilesByDate(this.historyImpl.getFileList(), startDate, endDate);

    double currentProgress = HistorySearchProgressListener.PROGRESS_MINIMUM_VALUE;
    double fileProgressStep = HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE;

    if (filelist.size() != 0)
        fileProgressStep = HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE / filelist.size();

    // start progress - minimum value
    fireProgressStateChanged(startDate, endDate, keywords,
            HistorySearchProgressListener.PROGRESS_MINIMUM_VALUE);

    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    Iterator<String> fileIterator = filelist.iterator();
    while (fileIterator.hasNext()) {
        String filename = fileIterator.next();

        Document doc = this.historyImpl.getDocumentForFile(filename);

        if (doc == null)
            continue;

        NodeList nodes = doc.getElementsByTagName("record");

        double nodesProgressStep = fileProgressStep;

        if (nodes.getLength() != 0)
            nodesProgressStep = fileProgressStep / nodes.getLength();

        Node node;
        for (int i = 0; i < nodes.getLength(); i++) {
            node = nodes.item(i);

            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp").getNodeValue();
            try {
                timestamp = sdf.parse(ts);
            } catch (ParseException e) {
                timestamp = new Date(Long.parseLong(ts));
            }

            if (isInPeriod(timestamp, startDate, endDate)) {
                NodeList propertyNodes = node.getChildNodes();

                HistoryRecord record = filterByKeyword(propertyNodes, timestamp, keywords, field,
                        caseSensitive);

                if (record != null) {
                    result.add(record);
                }
            }

            currentProgress += nodesProgressStep;
            fireProgressStateChanged(startDate, endDate, keywords, (int) currentProgress);
        }
    }

    // if maximum value is not reached fire an event
    if ((int) currentProgress < HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE) {
        fireProgressStateChanged(startDate, endDate, keywords,
                HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE);
    }

    return new OrderedQueryResultSet<HistoryRecord>(result);
}
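
The example above builds its TreeSet with a custom Comparator (HistoryRecordComparator). In that case add uses the Comparator, not the elements' natural ordering, both to position the new element and to decide whether it counts as a duplicate. A minimal sketch of this behaviour, with a hypothetical Record type and timestamp comparator (not part of the HistoryReaderImpl source):

import java.util.Comparator;
import java.util.TreeSet;

public class ComparatorTreeSetExample {
    // hypothetical record type ordered by timestamp
    static class Record {
        final long timestamp;
        final String text;

        Record(long timestamp, String text) {
            this.timestamp = timestamp;
            this.text = text;
        }
    }

    public static void main(String[] args) {
        // the comparator alone defines ordering and "equality" inside the set
        TreeSet<Record> records = new TreeSet<Record>(new Comparator<Record>() {
            public int compare(Record a, Record b) {
                return Long.compare(a.timestamp, b.timestamp);
            }
        });

        records.add(new Record(2000L, "second"));
        records.add(new Record(1000L, "first"));

        // rejected: compares equal to an existing element under the comparator,
        // even though it is a different object with different text
        boolean added = records.add(new Record(1000L, "duplicate timestamp"));
        System.out.println(added);                // false
        System.out.println(records.first().text); // first
    }
}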

From source file:com.peterbochs.instrument.InstrumentPanel.java

public void update2DChart() {
    // jfcMemory.getCategoryPlot().setDataset(createMemoryDataset());
    jfcMemory.getXYPlot().setDataset(createDataset());
    MyXYBlockRenderer renderer = (MyXYBlockRenderer) jfcMemory.getXYPlot().getRenderer();
    int largest = findLargest(
            Data.getChartData(CommonLib.convertFilesize((String) jFromComboBox.getSelectedItem()),
                    CommonLib.convertFilesize((String) jToComboBox.getSelectedItem()),
                    CommonLib.convertFilesize((String) jBlockSizeComboBox.getSelectedItem())));
    if (largest == 0) {
        largest = 1;
    }
    LookupPaintScale paintScale = new LookupPaintScale(0, largest, background);
    if (largest > 1) {
        // int mean =
        // medianWithoutZero(Data.getChartData(CommonLib.convertFilesize((String)
        // jFromComboBox.getSelectedItem()),
        // CommonLib.convertFilesize((String) jToComboBox
        // .getSelectedItem())));
        int m[] = Data.getChartData(CommonLib.convertFilesize((String) jFromComboBox.getSelectedItem()),
                CommonLib.convertFilesize((String) jToComboBox.getSelectedItem()),
                CommonLib.convertFilesize((String) jBlockSizeComboBox.getSelectedItem()));
        TreeSet<Integer> data = new TreeSet<Integer>();
        for (int x = 0; x < m.length; x++) {
            if (m[x] > 0) {
                data.add(m[x]);
            }
        }

        // paintScale.add(0, Color.white);
        ArrayList<Color> allColors = allColors();
        Object iData[] = data.toArray();
        paintScale.add(1, allColors.get(0));
        for (int x = 1; x < iData.length - 1; x++) {
            paintScale.add((int) (Integer) iData[x], allColors.get(allColors.size() / iData.length * x));
        }
        paintScale.add((int) (Integer) iData[iData.length - 1], allColors.get(allColors.size() - 1));
    }
    renderer.setPaintScale(paintScale);
    renderer.setBaseToolTipGenerator(new MyXYToolTipGenerator());
    jfcMemory.getXYPlot().setForegroundAlpha(1f);
    jZoomOutAutoRangeButtonActionPerformed(null);
}

From source file:de.zib.scalaris.examples.wikipedia.bliki.WikiServlet.java

/**
 * Creates a {@link WikiPageBean} object with the rendered content of a
 * given revision.
 * 
 * @param title
 *            the title of the article to render
 * @param result
 *            the revision to render (must be successful and contain a
 *            revision)
 * @param renderer
 *            the renderer to use (0=plain text, 1=Bliki)
 * @param request
 *            the request object
 * @param connection
 *            connection to the database
 * @param page
 *            the bean for the page (the rendered content will be added to
 *            this object)
 * @param noRedirect
 *            if <tt>true</tt>, a redirect will be shown as such, otherwise
 *            the content of the redirected page will be shown
 * @param wikiModel
 *            the wiki model to use
 * @param topLevel
 *            if this function is called from inside
 *            {@link #renderRevision()}, this will be <tt>false</tt>,
 *            otherwise always use <tt>true</tt>
 */
private void renderRevision(final String title, final RevisionResult result, final int renderer,
        final HttpServletRequest request, final Connection connection, final WikiPageBean page,
        final boolean noRedirect, final MyWikiModel wikiModel, final boolean topLevel) {
    // set the page's contents according to the renderer used
    // (categories are included in the content string, so they only
    // need special handling when the wiki renderer is used)
    NormalisedTitle titleN = NormalisedTitle.fromUnnormalised(title, namespace);
    wikiModel.setNamespaceName(namespace.getNamespaceByNumber(titleN.namespace));
    wikiModel.setPageName(titleN.title);
    if (renderer > 0) {
        String mainText = wikiModel.renderPageWithCache(result.revision.unpackedText());
        if (titleN.namespace.equals(MyNamespace.CATEGORY_NAMESPACE_KEY)) {
            ValueResult<List<NormalisedTitle>> catPagesResult = getPagesInCategory(connection, titleN);
            page.addStats(catPagesResult.stats);
            page.getInvolvedKeys().addAll(catPagesResult.involvedKeys);
            if (catPagesResult.success) {
                final TreeSet<String> subCategories = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
                final TreeSet<String> categoryPages = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
                final List<NormalisedTitle> tplPages = new ArrayList<NormalisedTitle>(
                        catPagesResult.value.size());

                for (NormalisedTitle pageInCat : catPagesResult.value) {
                    if (pageInCat.namespace.equals(MyNamespace.CATEGORY_NAMESPACE_KEY)) {
                        subCategories.add(pageInCat.title);
                    } else if (pageInCat.namespace.equals(MyNamespace.TEMPLATE_NAMESPACE_KEY)) {
                        tplPages.add(pageInCat);
                        categoryPages.add(pageInCat.denormalise(namespace));
                    } else {
                        categoryPages.add(pageInCat.denormalise(namespace));
                    }
                }
                if (!tplPages.isEmpty()) {
                    // all pages using a template are in the category of the template, too
                    ValueResult<List<NormalisedTitle>> tplResult = getPagesInTemplates(connection, tplPages,
                            title);
                    page.addStats(tplResult.stats);
                    page.getInvolvedKeys().addAll(tplResult.involvedKeys);
                    if (tplResult.success) {
                        for (NormalisedTitle pageInTplOfCat : tplResult.value) {
                            if (pageInTplOfCat.namespace.equals(MyNamespace.CATEGORY_NAMESPACE_KEY)) {
                                subCategories.add(pageInTplOfCat.title);
                            } else if (pageInTplOfCat.namespace.equals(MyNamespace.TEMPLATE_NAMESPACE_KEY)) {
                                // TODO: go into recursion?! -> for now, just add the template
                                //                                  tplPages.add(pageInTplOfCat);
                                categoryPages.add(pageInTplOfCat.denormalise(namespace));
                            } else {
                                categoryPages.add(pageInTplOfCat.denormalise(namespace));
                            }
                        }
                    } else {
                        if (tplResult.connect_failed) {
                            setParam_error(request, "ERROR: DB connection failed");
                        } else {
                            setParam_error(request, "ERROR: template page lists unavailable");
                        }
                        addToParam_notice(request, "error getting pages using templates: " + tplResult.message);
                    }
                }
                page.setSubCategories(subCategories);
                page.setCategoryPages(categoryPages);
            } else {
                if (catPagesResult.connect_failed) {
                    setParam_error(request, "ERROR: DB connection failed");
                } else {
                    setParam_error(request, "ERROR: category page list unavailable");
                }
                addToParam_notice(request, "error getting category pages: " + catPagesResult.message);
            }
        }
        page.setTitle(title);
        page.setVersion(result.revision.getId());
        String redirectedPageName = wikiModel.getRedirectLink();
        if (redirectedPageName != null) {
            if (noRedirect) {
                if (topLevel) {
                    page.setContentSub("Redirect page");
                }
                mainText = wikiModel.renderRedirectPage(redirectedPageName);
                page.setDate(Revision.stringToCalendar(result.revision.getTimestamp()));
            } else {
                final String safeTitle = StringEscapeUtils.escapeHtml(title);
                final String redirectUrl = wikiModel.getWikiBaseURL().replace("${title}", title);
                page.setContentSub("(Redirected from <a href=\"" + redirectUrl + "&redirect=no\" title=\""
                        + safeTitle + "\">" + title + "</a>)");
                // add the content from the page directed to:
                wikiModel.tearDown();
                wikiModel.setUp();

                RevisionResult redirectResult = getRevision(connection, redirectedPageName, namespace);
                page.addStats(redirectResult.stats);
                page.getInvolvedKeys().addAll(redirectResult.involvedKeys);
                if (redirectResult.success) {
                    renderRevision(redirectedPageName, redirectResult, renderer, request, connection, page,
                            true, wikiModel, false);
                    return;
                } else {
                    // non-existing/non-successful page is like redirect=no
                    mainText = wikiModel.renderRedirectPage(redirectedPageName);
                    page.setDate(Revision.stringToCalendar(result.revision.getTimestamp()));
                }
            }
        } else {
            setSubPageNav(title, page, wikiModel);
        }
        page.setPage(mainText);
        page.setCategories(wikiModel.getCategories().keySet());
        page.addStats(wikiModel.getStats());
        page.getInvolvedKeys().addAll(wikiModel.getInvolvedKeys());
    } else if (renderer == 0) {
        // for debugging, show all parameters:
        StringBuilder sb = new StringBuilder();
        for (Enumeration<?> req_pars = request.getParameterNames(); req_pars.hasMoreElements();) {
            String element = (String) req_pars.nextElement();
            sb.append(element + " = ");
            sb.append(request.getParameter(element) + "\n");
        }
        sb.append("\n\n");
        for (Enumeration<?> headers = request.getHeaderNames(); headers.hasMoreElements();) {
            String element = (String) headers.nextElement();
            sb.append(element + " = ");
            sb.append(request.getHeader(element) + "\n");
        }
        page.setPage("<p>WikiText:<pre>" + StringEscapeUtils.escapeHtml(result.revision.unpackedText())
                + "</pre></p>" + "<p>Version:<pre>"
                + StringEscapeUtils.escapeHtml(String.valueOf(result.revision.getId())) + "</pre></p>"
                + "<p>Last change:<pre>" + StringEscapeUtils.escapeHtml(result.revision.getTimestamp())
                + "</pre></p>" + "<p>Request Parameters:<pre>" + StringEscapeUtils.escapeHtml(sb.toString())
                + "</pre></p>");
        page.setTitle(title);
        page.setVersion(result.revision.getId());
        page.setDate(Revision.stringToCalendar(result.revision.getTimestamp()));
    }
    page.setNotice(getParam_notice(request));
    page.setError(getParam_error(request));
    page.setWikiTitle(siteinfo.getSitename());
    page.setWikiNamespace(namespace);
}

From source file:elh.eus.absa.Features.java

/**
 * @param lemma
 * @return TreeSet<String> containing the unigrams extracted from the opinions. No NLP chain is used.
 *  
 * @deprecated use {@link #extractWfNgrams(int, KAFDocument)} instead.
 */
@Deprecated
public TreeSet<String> extract1gramsOldNoNLPchain(String lemma) {
    TreeSet<String> result = new TreeSet<String>();
    System.err.println("unigram extraction: _" + lemma + "_");
    // Word form unigrams are required
    if (lemma.equalsIgnoreCase("wform")) {
        for (String sent : corpus.getSentences().values()) {
            String[] split = sent.split(" ");
            for (String w : split) {
                String w_nopunct = w.replaceAll("[^\\p{L}\\p{M}\\p{Nd}]", "");
                result.add(w_nopunct);
            }
        }
    }
    return result;
}

From source file:org.dasein.cloud.openstack.nova.os.ext.rackspace.lb.RackspaceLoadBalancers.java

@Override
public @Nonnull String createLoadBalancer(@Nonnull LoadBalancerCreateOptions options)
        throws CloudException, InternalException {
    APITrace.begin(provider, "LB.create");
    try {
        LbListener[] listeners = options.getListeners();

        if (listeners == null || listeners.length < 1) {
            logger.error("create(): Call failed to specify any listeners");
            throw new CloudException("Rackspace requires exactly one listener");
        }
        HashMap<String, Object> lb = new HashMap<String, Object>();

        lb.put("name", options.getName());
        lb.put("port", listeners[0].getPublicPort());
        if (listeners[0].getNetworkProtocol().equals(LbProtocol.HTTP)) {
            lb.put("protocol", "HTTP");
        } else if (listeners[0].getNetworkProtocol().equals(LbProtocol.HTTPS)) {
            lb.put("protocol", "HTTPS");
        } else if (listeners[0].getNetworkProtocol().equals(LbProtocol.RAW_TCP)) {
            lb.put("protocol", matchProtocol(listeners[0].getPublicPort()));
        } else {
            logger.error("Invalid protocol: " + listeners[0].getNetworkProtocol());
            throw new CloudException("Unsupported protocol: " + listeners[0].getNetworkProtocol());
        }
        if (listeners[0].getAlgorithm().equals(LbAlgorithm.LEAST_CONN)) {
            lb.put("algorithm", "LEAST_CONNECTIONS");
        } else if (listeners[0].getAlgorithm().equals(LbAlgorithm.ROUND_ROBIN)) {
            lb.put("algorithm", "ROUND_ROBIN");
        } else {
            logger.error("create(): Invalid algorithm: " + listeners[0].getAlgorithm());
            throw new CloudException("Unsupported algorithm: " + listeners[0].getAlgorithm());
        }
        ArrayList<Map<String, Object>> ips = new ArrayList<Map<String, Object>>();
        HashMap<String, Object> ip = new HashMap<String, Object>();

        ip.put("type", "PUBLIC");
        ips.add(ip);
        lb.put("virtualIps", ips);

        ArrayList<Map<String, Object>> nodes = new ArrayList<Map<String, Object>>();
        LoadBalancerEndpoint[] endpoints = options.getEndpoints();

        if (endpoints != null) {
            TreeSet<String> addresses = new TreeSet<String>();

            for (LoadBalancerEndpoint endpoint : endpoints) {
                String address = null;

                if (endpoint.getEndpointType().equals(LbEndpointType.IP)) {
                    address = endpoint.getEndpointValue();
                } else {
                    VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport()
                            .getVirtualMachine(endpoint.getEndpointValue());

                    if (vm != null) {
                        if (vm.getProviderRegionId().equals(provider.getContext().getRegionId())) {
                            RawAddress[] tmp = vm.getPrivateAddresses();

                            if (tmp != null && tmp.length > 0) {
                                address = tmp[0].getIpAddress();
                            }
                        }
                        if (address == null) {
                            RawAddress[] tmp = vm.getPublicAddresses();

                            if (tmp != null && tmp.length > 0) {
                                address = tmp[0].getIpAddress();
                            }
                        }
                    }
                }
                if (address != null && !addresses.contains(address)) {
                    HashMap<String, Object> node = new HashMap<String, Object>();

                    node.put("address", address);
                    node.put("condition", "ENABLED");
                    node.put("port", listeners[0].getPrivatePort());
                    nodes.add(node);
                    addresses.add(address);
                }
            }
        }
        if (nodes.isEmpty()) {
            logger.error("create(): Rackspace requires at least one node assignment");
            throw new CloudException("Rackspace requires at least one node assignment");
        }
        lb.put("nodes", nodes);

        HashMap<String, Object> json = new HashMap<String, Object>();

        json.put("loadBalancer", lb);
        NovaMethod method = new NovaMethod(provider);

        if (logger.isTraceEnabled()) {
            logger.trace("create(): Posting new load balancer data...");
        }
        JSONObject result = method.postString(SERVICE, RESOURCE, null, new JSONObject(json), false);

        if (result == null) {
            logger.error("create(): Method executed successfully, but no load balancer was created");
            throw new CloudException("Method executed successfully, but no load balancer was created");
        }
        try {
            if (result.has("loadBalancer")) {
                JSONObject ob = result.getJSONObject("loadBalancer");

                if (ob != null) {
                    return ob.getString("id");
                }
            }
            logger.error("create(): Method executed successfully, but no load balancer was found in JSON");
            throw new CloudException("Method executed successfully, but no load balancer was found in JSON");
        } catch (JSONException e) {
            logger.error(
                    "create(): Failed to identify a load balancer ID in the cloud response: " + e.getMessage());
            throw new CloudException(
                    "Failed to identify a load balancer ID in the cloud response: " + e.getMessage());
        }
    } finally {
        APITrace.end();
    }
}
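
In the example above, duplicate node addresses are filtered with an explicit contains check before calling add. Because add itself reports whether the element was newly inserted, the same de-duplication can also be driven by its boolean return value. A small sketch of that pattern with hypothetical input data (not the Rackspace API):

import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

public class DedupWithAddExample {
    public static void main(String[] args) {
        String[] candidates = { "10.0.0.1", "10.0.0.2", "10.0.0.1", "10.0.0.3" };

        TreeSet<String> seen = new TreeSet<String>();
        List<String> nodes = new ArrayList<String>();

        for (String address : candidates) {
            // add returns false for addresses already in the set,
            // so each address produces at most one node entry
            if (address != null && seen.add(address)) {
                nodes.add(address);
            }
        }
        System.out.println(nodes); // [10.0.0.1, 10.0.0.2, 10.0.0.3]
    }
}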

From source file:com.joliciel.jochre.graphics.ShapeImpl.java

@Override
public Collection<BridgeCandidate> getBridgeCandidates(double maxBridgeWidth) {
    if (this.bridgeCandidates == null) {
        TreeSet<VerticalLineSegment> lines = this.getVerticalLineSegments();

        // Now, detect "bridges" which could indicate that the shape should be split

        // First, detect which spaces are "enclosed" and which touch the outer walls
        // To do this, build up a set of all inverse (white) lines
        TreeSet<VerticalLineSegment> inverseLines = new TreeSet<VerticalLineSegment>();
        int currentX = -1;
        VerticalLineSegment previousLine = null;
        for (VerticalLineSegment line : lines) {
            //LOG.debug("Checking line x = " + line.x + ", top = " + line.yTop + ", bottom = " + line.yBottom);
            if (line.x != currentX) {
                // new x-coordinate
                if (previousLine != null && previousLine.yBottom < this.getHeight() - 1) {
                    VerticalLineSegment inverseLine = new VerticalLineSegment(previousLine.x,
                            previousLine.yBottom + 1);
                    inverseLine.yBottom = this.getHeight() - 1;
                    inverseLines.add(inverseLine);
                    //LOG.debug("Adding inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop + ", bottom = " + inverseLine.yBottom);
                }
                if (line.yTop > 0) {
                    VerticalLineSegment inverseLine = new VerticalLineSegment(line.x, line.yTop - 1);
                    inverseLine.yTop = 0;
                    inverseLines.add(inverseLine);
                    //LOG.debug("Adding inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop + ", bottom = " + inverseLine.yBottom);
                }
                currentX = line.x;
            } else if (previousLine != null) {
                VerticalLineSegment inverseLine = new VerticalLineSegment(previousLine.x,
                        previousLine.yBottom + 1);
                inverseLine.yBottom = line.yTop - 1;
                inverseLines.add(inverseLine);
                //LOG.debug("Adding inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop + ", bottom = " + inverseLine.yBottom);
            }
            previousLine = line;
        }
        if (previousLine != null && previousLine.yBottom < this.getHeight() - 1) {
            VerticalLineSegment inverseLine = new VerticalLineSegment(previousLine.x, previousLine.yBottom + 1);
            inverseLine.yBottom = this.getHeight() - 1;
            inverseLines.add(inverseLine);
            //LOG.debug("Adding inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop + ", bottom = " + inverseLine.yBottom);
        }
        LOG.debug("inverseLines size: " + inverseLines.size());

        // Calculate neighbours for inverse lines
        for (VerticalLineSegment inverseLine : inverseLines) {
            for (VerticalLineSegment otherLine : inverseLines) {
                if (otherLine.x == inverseLine.x + 1) {
                    if (inverseLine.yTop - 1 <= otherLine.yBottom
                            && otherLine.yTop <= inverseLine.yBottom + 1) {
                        inverseLine.rightSegments.add(otherLine);
                        otherLine.leftSegments.add(inverseLine);
                    }
                }
                if (otherLine.x == inverseLine.x - 1) {
                    if (inverseLine.yTop - 1 <= otherLine.yBottom
                            && otherLine.yTop <= inverseLine.yBottom + 1) {
                        inverseLine.leftSegments.add(otherLine);
                        otherLine.rightSegments.add(inverseLine);
                    }
                }
            }
        }

        // Eliminate any white lines which somehow touch an edge
        Stack<VerticalLineSegment> lineStack = new Stack<VerticalLineSegment>();
        Set<VerticalLineSegment> outerInverseLines = new HashSet<VerticalLineSegment>();
        for (VerticalLineSegment inverseLine : inverseLines) {
            if (inverseLine.yTop == 0 || inverseLine.x == 0 || inverseLine.yBottom == this.getHeight() - 1
                    || inverseLine.x == this.getWidth() - 1)
                lineStack.push(inverseLine);
        }
        while (!lineStack.isEmpty()) {
            VerticalLineSegment inverseLine = lineStack.pop();
            if (!inverseLine.touched) {
                inverseLine.touched = true;
                outerInverseLines.add(inverseLine);
                //LOG.debug("Outer inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop + ", bottom = " + inverseLine.yBottom);

                for (VerticalLineSegment rightLine : inverseLine.rightSegments)
                    lineStack.push(rightLine);
                for (VerticalLineSegment leftLine : inverseLine.leftSegments) {
                    lineStack.push(leftLine);
                }
            }
        }
        LOG.debug("outerInverseLines size: " + outerInverseLines.size());

        Set<VerticalLineSegment> enclosedInverseLines = new HashSet<VerticalLineSegment>(inverseLines);
        enclosedInverseLines.removeAll(outerInverseLines);
        LOG.debug("enclosedInverseLines.size: " + enclosedInverseLines.size());
        if (LOG.isDebugEnabled()) {
            for (VerticalLineSegment inverseLine : enclosedInverseLines)
                LOG.debug("Enclosed inverse line x = " + inverseLine.x + ", top = " + inverseLine.yTop
                        + ", bottom = " + inverseLine.yBottom);
        }

        // Add bridge candidates
        // based on maximum line length and having exactly one neighbour on each side      
        LOG.debug("Adding bridge candidates");
        List<BridgeCandidate> candidateList = new ArrayList<BridgeCandidate>();
        for (VerticalLineSegment line : lines) {
            if (line.rightSegments.size() == 1 && line.leftSegments.size() == 1
                    && line.length() <= maxBridgeWidth) {
                // also the bridge width should be considered where two vertical lines touch each other
                // rather than for the full length of the line
                BridgeCandidate candidate = null;
                VerticalLineSegment rightLine = line.rightSegments.iterator().next();
                VerticalLineSegment leftLine = line.leftSegments.iterator().next();
                int leftTopTouch = (leftLine.yTop > line.yTop ? leftLine.yTop : line.yTop);
                int leftBottomTouch = (leftLine.yBottom < line.yBottom ? leftLine.yBottom : line.yBottom);
                int rightTopTouch = (rightLine.yTop > line.yTop ? rightLine.yTop : line.yTop);
                int rightBottomTouch = (rightLine.yBottom < line.yBottom ? rightLine.yBottom : line.yBottom);

                int rightLength = rightTopTouch - rightBottomTouch;
                int leftLength = leftTopTouch - leftBottomTouch;

                if (line.length() <= maxBridgeWidth || rightLength <= maxBridgeWidth
                        || leftLength <= maxBridgeWidth) {
                    candidate = new BridgeCandidate(this, line);

                    if (rightLength < leftLength && rightLength < line.length()) {
                        candidate.topTouch = rightTopTouch;
                        candidate.bottomTouch = rightBottomTouch;
                    } else if (leftLength < line.length()) {
                        candidate.topTouch = leftTopTouch;
                        candidate.bottomTouch = leftBottomTouch;
                    }
                    LOG.debug("Adding bridge candidate x = " + candidate.x + ", top = " + candidate.yTop
                            + ", bottom = " + candidate.yBottom);
                    candidateList.add(candidate);
                }
            }
        }
        LOG.debug("Bridge candidate size: " + candidateList.size());

        LOG.debug("Eliminating candidates with shorter neighbor");
        Set<BridgeCandidate> candidatesToEliminate = null;
        if (candidateList.size() > 0) {
            // eliminate any bridge candidates that touch a shorter bridge candidate
            candidatesToEliminate = new HashSet<BridgeCandidate>();
            for (int i = 0; i < candidateList.size() - 1; i++) {
                BridgeCandidate candidate = candidateList.get(i);
                for (int j = i + 1; j < candidateList.size(); j++) {
                    BridgeCandidate otherCandidate = candidateList.get(j);
                    if (otherCandidate.x == candidate.x + 1
                            && candidate.rightSegments.contains(otherCandidate)) {
                        if ((candidate.bridgeWidth()) <= (otherCandidate.bridgeWidth())) {
                            LOG.debug("Eliminating candidate x = " + otherCandidate.x + ", top = "
                                    + otherCandidate.yTop + ", bottom = " + otherCandidate.yBottom);
                            candidatesToEliminate.add(otherCandidate);
                        } else {
                            LOG.debug("Eliminating candidate x = " + candidate.x + ", top = " + candidate.yTop
                                    + ", bottom = " + candidate.yBottom);
                            candidatesToEliminate.add(candidate);
                        }
                    }
                }
            }
            candidateList.removeAll(candidatesToEliminate);

            LOG.debug("Bridge candidate size: " + candidateList.size());

            // To be a bridge, three additional things have to be true:
            // (A) intersection between right & left shape = null
            // (B) weight of right shape & weight of left shape > a certain threshold
            // (C) little overlap right boundary of left shape, left boundary of right shape

            LOG.debug("Eliminating candidates touching enclosed space");
            // (A) intersection between right & left shape = null
            // Intersection between right and left shape is non-null
            // if the line segment X touches an enclosed space immediately above or below
            candidatesToEliminate = new HashSet<BridgeCandidate>();
            for (BridgeCandidate candidate : candidateList) {
                boolean nullIntersection = true;
                for (VerticalLineSegment inverseLine : enclosedInverseLines) {
                    if (candidate.x == inverseLine.x) {
                        if (inverseLine.yBottom == candidate.yTop - 1
                                || inverseLine.yTop == candidate.yBottom + 1) {
                            nullIntersection = false;
                            break;
                        }
                    }
                }
                if (!nullIntersection) {
                    LOG.debug("Eliminating candidate x = " + candidate.x + ", top = " + candidate.yTop
                            + ", bottom = " + candidate.yBottom);
                    candidatesToEliminate.add(candidate);
                }
            }
            candidateList.removeAll(candidatesToEliminate);
            LOG.debug("Remaining bridge candidate size: " + candidateList.size());

            // another criterion for avoiding "false splits" is that on both side of the bridge
            // the shapes pretty rapidly expand in width both up and down
            LOG.debug("Eliminating candidates without vertical expansion on both sides");
            candidatesToEliminate = new HashSet<BridgeCandidate>();
            int expansionLimit = (int) Math.ceil(((double) this.getWidth()) / 6.0);
            for (BridgeCandidate candidate : candidateList) {
                // take into account the portion touching on the right or left
                boolean isCandidate = true;
                Stack<VerticalLineSegment> leftLines = new Stack<VerticalLineSegment>();
                Stack<Integer> leftDepths = new Stack<Integer>();
                leftLines.push(candidate);
                leftDepths.push(0);
                int leftTop = candidate.topTouch;
                int leftBottom = candidate.bottomTouch;
                while (!leftLines.isEmpty()) {
                    VerticalLineSegment line = leftLines.pop();
                    int depth = leftDepths.pop();
                    if (line.yTop < leftTop)
                        leftTop = line.yTop;
                    if (line.yBottom > leftBottom)
                        leftBottom = line.yBottom;
                    if (depth <= expansionLimit) {
                        for (VerticalLineSegment leftSegment : line.leftSegments) {
                            leftLines.push(leftSegment);
                            leftDepths.push(depth + 1);
                        }
                    }
                }
                if (leftTop == candidate.topTouch || leftBottom == candidate.bottomTouch)
                    isCandidate = false;
                if (isCandidate) {
                    Stack<VerticalLineSegment> rightLines = new Stack<VerticalLineSegment>();
                    Stack<Integer> rightDepths = new Stack<Integer>();
                    rightLines.push(candidate);
                    rightDepths.push(0);
                    int rightTop = candidate.topTouch;
                    int rightBottom = candidate.bottomTouch;
                    while (!rightLines.isEmpty()) {
                        VerticalLineSegment line = rightLines.pop();
                        int depth = rightDepths.pop();
                        if (line.yTop < rightTop)
                            rightTop = line.yTop;
                        if (line.yBottom > rightBottom)
                            rightBottom = line.yBottom;
                        if (depth <= expansionLimit) {
                            for (VerticalLineSegment rightSegment : line.rightSegments) {
                                rightLines.push(rightSegment);
                                rightDepths.push(depth + 1);
                            }
                        }
                    }
                    if (rightTop == candidate.topTouch || rightBottom == candidate.bottomTouch)
                        isCandidate = false;
                }
                if (!isCandidate) {
                    LOG.debug("Eliminating candidate x = " + candidate.x + ", top = " + candidate.yTop
                            + ", bottom = " + candidate.yBottom);
                    candidatesToEliminate.add(candidate);
                }
            }
            candidateList.removeAll(candidatesToEliminate);
            LOG.debug("Remaining bridge candidate size: " + candidateList.size());

            if (LOG.isDebugEnabled()) {
                for (VerticalLineSegment candidate : candidateList) {
                    LOG.debug("Remaining candidate x = " + candidate.x + ", top = " + candidate.yTop
                            + ", bottom = " + candidate.yBottom);
                }
            }
        }

        if (candidateList.size() > 0) {
            // (B) weight of right shape & weight of left shape > a certain threshold
            // (C) little overlap right boundary of left shape, left boundary of right shape
            // 
            // We can now divide the shape into n groups, each separated by a candidate
            // We recursively build a group until we reach a candidate
            // and indicate whether it's the right or left border of the candidate.
            // We then keep going from the candidate on to the next one
            // We keep tab of the size of each group and of its right & left boundaries
            // at the end we can easily determine the right and left boundaries of each,
            // as well as the right & left pixel weight
            List<VerticalLineGroup> groups = new ArrayList<VerticalLineGroup>();

            VerticalLineSegment firstLine = lines.first();
            lineStack = new Stack<VerticalLineSegment>();
            Stack<BridgeCandidate> candidateStack = new Stack<BridgeCandidate>();
            Stack<Boolean> fromLeftStack = new Stack<Boolean>();
            Stack<Boolean> candidateFromLeftStack = new Stack<Boolean>();
            lineStack.push(firstLine);
            fromLeftStack.push(true);
            VerticalLineGroup group = new VerticalLineGroup(this);
            List<BridgeCandidate> touchedCandidates = new ArrayList<BridgeCandidate>();
            while (!lineStack.isEmpty()) {
                while (!lineStack.isEmpty()) {
                    VerticalLineSegment line = lineStack.pop();
                    boolean fromLeft = fromLeftStack.pop();
                    if (line.touched)
                        continue;

                    line.touched = true;
                    if (candidateList.contains(line)) {
                        // a candidate!
                        LOG.debug("Touching candidate x = " + line.x + ", top = " + line.yTop + ", bottom = "
                                + line.yBottom);
                        BridgeCandidate candidate = null;
                        for (BridgeCandidate existingCandidate : candidateList) {
                            if (existingCandidate.equals(line)) {
                                candidate = existingCandidate;
                                break;
                            }
                        }

                        boolean foundCandidate = touchedCandidates.contains(candidate);

                        if (!foundCandidate) {
                            touchedCandidates.add(candidate);
                            candidateStack.push(candidate);
                            candidateFromLeftStack.push(fromLeft);
                            if (fromLeft) {
                                // coming from the left
                                group.rightCandidates.add(candidate);
                                candidate.leftGroup = group;
                            } else {
                                group.leftCandidates.add(candidate);
                                candidate.rightGroup = group;
                            }
                        }
                    } else {
                        // not a candidate
                        LOG.debug("Touching line length = " + line.length() + ", x = " + line.x + ", top = "
                                + line.yTop + ", bottom = " + line.yBottom);
                        group.pixelCount += line.length();
                        if (line.x < group.leftBoundary)
                            group.leftBoundary = line.x;
                        if (line.x > group.rightBoundary)
                            group.rightBoundary = line.x;
                        if (line.yTop < group.topBoundary)
                            group.topBoundary = line.yTop;
                        if (line.yBottom > group.bottomBoundary)
                            group.bottomBoundary = line.yBottom;
                        for (VerticalLineSegment leftLine : line.leftSegments) {
                            lineStack.push(leftLine);
                            fromLeftStack.push(false);
                        }
                        for (VerticalLineSegment rightLine : line.rightSegments) {
                            lineStack.push(rightLine);
                            fromLeftStack.push(true);
                        }
                    }
                } // no more lines in this group
                groups.add(group);
                if (!candidateStack.isEmpty()) {
                    BridgeCandidate candidate = candidateStack.pop();
                    boolean fromLeft = candidateFromLeftStack.pop();
                    //lineStack.push(candidate.line);
                    //fromLeftStack.push(fromLeft);
                    LOG.debug("*** New Group ***");
                    LOG.debug("Next candidate:  x = " + candidate.x + ", top = " + candidate.yTop
                            + ", bottom = " + candidate.yBottom);
                    group = new VerticalLineGroup(this);
                    if (fromLeft) {
                        group.leftCandidates.add(candidate);
                        candidate.rightGroup = group;
                    } else {
                        group.rightCandidates.add(candidate);
                        candidate.leftGroup = group;
                    }

                    // add this candidate's neighbours to the lineStack
                    for (VerticalLineSegment leftLine : candidate.leftSegments) {
                        lineStack.push(leftLine);
                        fromLeftStack.push(false);
                    }
                    for (VerticalLineSegment rightLine : candidate.rightSegments) {
                        lineStack.push(rightLine);
                        fromLeftStack.push(true);
                    }
                } // next candidate on candidate stack
            } // no more lines to process

            if (LOG.isDebugEnabled()) {
                LOG.debug("Found " + groups.size() + " groups");
                int i = 1;
                for (VerticalLineGroup aGroup : groups) {
                    LOG.debug("Group " + i++ + ", pixelCount: " + aGroup.pixelCount + ", leftBoundary: "
                            + aGroup.leftBoundary + ", rightBoundary: " + aGroup.rightBoundary);
                    LOG.debug("Candidates on left: ");
                    for (BridgeCandidate candidate : aGroup.leftCandidates)
                        LOG.debug("Candidate x = " + candidate.x + ", top = " + candidate.yTop + ", bottom = "
                                + candidate.yBottom);
                    LOG.debug("Candidates on right: ");
                    for (BridgeCandidate candidate : aGroup.rightCandidates)
                        LOG.debug("Candidate x = " + candidate.x + ", top = " + candidate.yTop + ", bottom = "
                                + candidate.yBottom);

                }
                LOG.debug("Found " + candidateList.size() + " candidates");
                for (BridgeCandidate candidate : candidateList) {
                    LOG.debug("Candidate x = " + candidate.x + ", top = " + candidate.yTop + ", bottom = "
                            + candidate.yBottom);
                    LOG.debug("- Left group = pixelCount: " + candidate.leftGroup.pixelCount
                            + ", leftBoundary: " + candidate.leftGroup.leftBoundary + ", rightBoundary: "
                            + candidate.leftGroup.rightBoundary);
                    LOG.debug("- Right group = pixelCount: " + candidate.rightGroup.pixelCount
                            + ", leftBoundary: " + candidate.rightGroup.leftBoundary + ", rightBoundary: "
                            + candidate.rightGroup.rightBoundary);
                }
            } // should we log?

            // calculate each candidate's pixel totals and boundaries
            for (BridgeCandidate candidate : candidateList) {
                for (VerticalLineGroup lineGroup : groups)
                    lineGroup.touched = false;
                Stack<VerticalLineGroup> groupStack = new Stack<VerticalLineGroup>();
                groupStack.push(candidate.leftGroup);
                while (!groupStack.isEmpty()) {
                    VerticalLineGroup lineGroup = groupStack.pop();
                    if (lineGroup.touched)
                        continue;
                    lineGroup.touched = true;
                    candidate.leftPixels += lineGroup.pixelCount;
                    if (lineGroup.leftBoundary < candidate.leftShapeLeftBoundary)
                        candidate.leftShapeLeftBoundary = lineGroup.leftBoundary;
                    if (lineGroup.rightBoundary > candidate.leftShapeRightBoundary)
                        candidate.leftShapeRightBoundary = lineGroup.rightBoundary;
                    for (BridgeCandidate leftCandidate : lineGroup.leftCandidates) {
                        if (!candidate.equals(leftCandidate)) {
                            candidate.leftPixels += leftCandidate.length();
                            groupStack.push(leftCandidate.leftGroup);
                        }
                    }
                    for (BridgeCandidate rightCandidate : lineGroup.rightCandidates) {
                        if (!candidate.equals(rightCandidate)) {
                            candidate.leftPixels += rightCandidate.length();
                            groupStack.push(rightCandidate.rightGroup);
                        }
                    }
                } // next left group
                groupStack.push(candidate.rightGroup);
                while (!groupStack.isEmpty()) {
                    VerticalLineGroup lineGroup = groupStack.pop();
                    if (lineGroup.touched)
                        continue;
                    lineGroup.touched = true;
                    candidate.rightPixels += lineGroup.pixelCount;
                    if (lineGroup.leftBoundary < candidate.rightShapeLeftBoundary)
                        candidate.rightShapeLeftBoundary = lineGroup.leftBoundary;
                    if (lineGroup.rightBoundary > candidate.rightShapeRightBoundary)
                        candidate.rightShapeRightBoundary = lineGroup.rightBoundary;
                    for (BridgeCandidate leftCandidate : lineGroup.leftCandidates) {
                        if (!candidate.equals(leftCandidate)) {
                            candidate.rightPixels += leftCandidate.length();
                            groupStack.push(leftCandidate.leftGroup);
                        }
                    }
                    for (BridgeCandidate rightCandidate : lineGroup.rightCandidates) {
                        if (!candidate.equals(rightCandidate)) {
                            candidate.rightPixels += rightCandidate.length();
                            groupStack.push(rightCandidate.rightGroup);
                        }
                    }
                } // next right group
            } // next candidate

        } // do we have any candidates?
        this.bridgeCandidates = candidateList;
    } // lazy load

    return this.bridgeCandidates;
}
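
A minimal, self-contained sketch of the ordered-candidate pattern the bookkeeping above relies on. The Candidate class, its fields, and the main method below are hypothetical stand-ins rather than the snippet's own types; the point is only that TreeSet.add keeps entries sorted by their Comparable key and silently rejects duplicates, so iteration order is deterministic.

import java.util.TreeSet;

public class CandidateSetSketch {

    // Hypothetical candidate type: ordered by x, then by vertical position.
    static class Candidate implements Comparable<Candidate> {
        final int x, yTop, yBottom;

        Candidate(int x, int yTop, int yBottom) {
            this.x = x;
            this.yTop = yTop;
            this.yBottom = yBottom;
        }

        @Override
        public int compareTo(Candidate o) {
            if (x != o.x)
                return Integer.compare(x, o.x);
            if (yTop != o.yTop)
                return Integer.compare(yTop, o.yTop);
            return Integer.compare(yBottom, o.yBottom);
        }
    }

    public static void main(String[] args) {
        TreeSet<Candidate> candidates = new TreeSet<Candidate>();
        candidates.add(new Candidate(10, 5, 40));
        candidates.add(new Candidate(3, 0, 25));
        // Duplicate key: add(...) returns false and the set is unchanged.
        candidates.add(new Candidate(10, 5, 40));

        // Prints the two distinct candidates in ascending x order.
        for (Candidate c : candidates)
            System.out.println("x = " + c.x + ", top = " + c.yTop + ", bottom = " + c.yBottom);
    }
}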

From source file:net.morimekta.providence.maven.plugin.BaseGenerateSourcesMojo.java

boolean executeInternal(File outputDir, IncludeExcludeFileSelector files, String defaultInputIncludes,
        boolean testCompile) throws MojoExecutionException, MojoFailureException {

    Set<File> inputs = ProvidenceInput.getInputFiles(project, files, defaultInputIncludes);
    if (inputs.isEmpty()) {
        return false;
    }

    if (!outputDir.exists()) {
        if (!outputDir.mkdirs()) {
            throw new MojoExecutionException("Unable to create target directory " + outputDir);
        }
    }

    // Sorted, de-duplicated set of include directories handed to the type loader below.
    TreeSet<File> includes = new TreeSet<>();

    File workingDir = new File(buildDir, testCompile ? "providence-test" : "providence");
    if (!workingDir.exists()) {
        if (!workingDir.mkdirs()) {
            throw new MojoExecutionException("Unable to create working directory " + workingDir);
        }
    } else {
        // Clear out anything left in the working directory from previous runs.
        File[] deleteFiles = workingDir.listFiles();
        if (deleteFiles != null) {
            StreamSupport.<File>stream(
                    Spliterators.spliterator(deleteFiles, Spliterator.DISTINCT | Spliterator.IMMUTABLE), false)
                    .forEach(File::delete);
        }
    }

    Set<Artifact> resolvedArtifacts = new HashSet<>();
    for (Dependency dep : dependencies) {
        dep.setType(ProvidenceAssemblyMojo.TYPE);
        if (dep.getClassifier() == null || dep.getClassifier().isEmpty()) {
            dep.setClassifier(ProvidenceAssemblyMojo.CLASSIFIER);
        }

        Artifact artifact = repositorySystem.createDependencyArtifact(dep);
        // Avoid resolving stuff we already have resolved.
        if (resolvedArtifacts.contains(artifact)) {
            continue;
        }

        ArtifactResolutionRequest request = new ArtifactResolutionRequest();
        request.setLocalRepository(localRepository);
        request.setRemoteRepositories(remoteRepositories);
        request.setResolveTransitively(false);
        request.setArtifact(artifact);

        ArtifactResolutionResult result = artifactResolver.resolve(request);

        boolean found = false;
        for (Artifact resolved : result.getArtifacts()) {
            if (artifact.equals(resolved)) {
                resolvedArtifacts.add(resolved);
                addDependencyInclude(workingDir, includes, resolved);
                found = true;
                break;
            }
        }
        if (!found) {
            throw new MojoFailureException("Unable to resolve providence dependency: " + artifact.getGroupId()
                    + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() + ":"
                    + artifact.getClassifier());
        }
    }

    if (includeDirs != null) {
        DirectoryScanner includeScanner = new DirectoryScanner();
        includeScanner.setIncludes(includeDirs.getIncludes());
        if (includeDirs.getExcludes() != null) {
            includeScanner.setExcludes(includeDirs.getExcludes());
        }
        includeScanner.setBasedir(project.getBasedir());
        includeScanner.scan();
        for (String dir : includeScanner.getIncludedDirectories()) {
            includes.add(new File(project.getBasedir(), dir));
        }
        for (String dir : includeScanner.getExcludedDirectories()) {
            includes.remove(new File(project.getBasedir(), dir));
        }
    }
    // TreeSet.add de-duplicates, so each parent directory is only included once.
    inputs.stream().map(File::getParentFile).forEach(includes::add);

    FileManager fileManager = new FileManager(outputDir);
    DocumentParser parser = new ThriftDocumentParser();
    TypeLoader loader = new TypeLoader(includes, parser);

    LinkedList<CDocument> documents = new LinkedList<>();

    for (File in : inputs) {
        try {
            documents.add(loader.load(in));
        } catch (IOException e) {
            throw new MojoExecutionException("Failed to read thrift file: " + in.getName(), e);
        } catch (ParseException e) {
            getLog().warn(e.getMessage());
            getLog().warn(".---------------------.");
            throw new MojoFailureException("Failed to parse thrift file: " + in.getName(), e);
        }
    }

    try {
        Generator generator;
        if (tiny) {
            TinyOptions options = new TinyOptions();
            options.jackson = jackson;
            if (android) {
                throw new MojoExecutionException("Android option not compatible with 'tiny_java' variant.");
            }
            generator = new TinyGenerator(fileManager, loader.getRegistry(), options);
        } else {
            JOptions options = new JOptions();
            options.android = android;
            if (jackson) {
                throw new MojoExecutionException("Jackson option not compatible with 'java' variant.");
            }
            generator = new JGenerator(fileManager, loader.getRegistry(), options);
        }

        for (CDocument doc : documents) {
            try {
                generator.generate(doc);
            } catch (IOException e) {
                throw new MojoExecutionException("Failed to write document: " + doc.getPackageName(), e);
            } catch (GeneratorException e) {
                getLog().warn(e.getMessage());
                throw new MojoFailureException("Failed to generate document: " + doc.getPackageName(), e);
            }
        }
    } catch (GeneratorException e) {
        getLog().warn(e.getMessage());
        throw new MojoFailureException("Failed to generate file: " + e.getMessage(), e);
    }

    return compileOutput;
}
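
A compact, self-contained sketch of the TreeSet<File> pattern used above to build the include path: the parent directory of each input file is added to a sorted set, so inputs sharing a directory collapse to a single include entry. The file paths are invented for illustration and are not taken from the plugin.

import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class IncludeDirSketch {
    public static void main(String[] args) {
        // Hypothetical input files; two of them share the same parent directory.
        List<File> inputs = Arrays.asList(
                new File("src/main/providence/model.thrift"),
                new File("src/main/providence/service.thrift"),
                new File("src/test/providence/test_model.thrift"));

        // File implements Comparable<File>, so a TreeSet keeps the directories
        // sorted by path and TreeSet.add drops duplicates automatically.
        TreeSet<File> includes = new TreeSet<>();
        for (File in : inputs) {
            includes.add(in.getParentFile());
        }

        // Only two distinct directories remain, printed in sorted order.
        includes.forEach(dir -> System.out.println(dir.getPath()));
    }
}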