Example usage for java.util TreeSet iterator

List of usage examples for java.util TreeSet iterator

Introduction

On this page you can find usage examples for the java.util TreeSet iterator method.

Prototype

public Iterator<E> iterator() 

Source Link

Document

Returns an iterator over the elements in this set in ascending order.

Usage

From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java

/**
 * Re-inserts namespace declarations and other attributes into the root tag of a
 * canonicalized XML fragment, adding the ds / etsi / asic namespaces if they are
 * missing and requested.
 *
 * @param bCanInfo    canonicalized XML as UTF-8 bytes
 * @param bDsNs       if true, ensure the xmldsig namespace is declared
 * @param bEtsiNs     if true, ensure the XAdES (etsi) namespace is declared
 * @param dsNsPref    prefix to use for the xmldsig namespace
 * @param xadesNsPref prefix to use for the XAdES namespace
 * @param bAsicNs     if true, ensure the ASiC namespace is declared
 * @param asicPref    prefix to use for the ASiC namespace
 * @return the modified XML as UTF-8 bytes, or the original input if anything fails
 */
private byte[] addNamespaces(byte[] bCanInfo, boolean bDsNs, boolean bEtsiNs, String dsNsPref,
        String xadesNsPref, boolean bAsicNs, String asicPref) {
    byte[] bInfo = bCanInfo;
    try {
        String s1 = new String(bCanInfo, "UTF-8");
        if (m_logger.isDebugEnabled())
            m_logger.debug("Input xml:\n------\n" + s1 + "\n------\n");
        TreeSet tsOtherAttr = new TreeSet();
        TreeSet tsNs = collectNamespaces(s1, tsOtherAttr);
        // Debug dumps only when debug logging is on — avoids pointless iteration otherwise.
        if (m_logger.isDebugEnabled()) {
            for (Iterator iNs = tsNs.iterator(); iNs.hasNext();)
                m_logger.debug("Has ns: " + iNs.next());
            for (Iterator iNs = tsOtherAttr.iterator(); iNs.hasNext();)
                m_logger.debug("Other attr: " + iNs.next());
        }
        if (bDsNs)
            addNamespaceIfMissing(tsNs, xmlnsDs, dsNsPref);
        if (bEtsiNs)
            addNamespaceIfMissing(tsNs, xmlnsEtsi, xadesNsPref);
        if (bAsicNs)
            addNamespaceIfMissing(tsNs, xmlnsAsic, asicPref);
        if (m_logger.isDebugEnabled()) {
            for (Iterator iNs = tsNs.iterator(); iNs.hasNext();)
                m_logger.debug("Now has ns: " + iNs.next());
        }
        // Re-insert the (possibly extended) attribute list into the root tag header.
        int p1 = s1.indexOf(' ');
        int p2 = s1.indexOf('>');
        if (p2 < 0)
            return bInfo; // malformed input: no tag end found, return original unchanged
        if (p1 < 0 || p1 > p2)
            p1 = p2; // root element has no attributes (no space before '>')
        String sRest = s1.substring(p2);
        StringBuilder sb = new StringBuilder(s1.substring(0, p1));
        for (Iterator iNs = tsNs.iterator(); iNs.hasNext();)
            sb.append(" ").append(iNs.next());
        for (Iterator iNs = tsOtherAttr.iterator(); iNs.hasNext();)
            sb.append(" ").append(iNs.next());
        sb.append(sRest);
        bInfo = sb.toString().getBytes("UTF-8");
        if (m_logger.isDebugEnabled())
            m_logger.debug("Modified xml:\n------\n" + sb.toString() + "\n------\n");
    } catch (Exception ex) {
        m_logger.error("Error adding namespaces: " + ex);
    }
    return bInfo; // default is to return original content
}

From source file:net.semanticmetadata.lire.solr.LireRequestHandler.java

/**
 * Actual search implementation based on (i) hash based retrieval and (ii) feature based re-ranking.
 *
 * @param req           the Solr request; "fq" (filter) and "fl" (field list) params are honored
 * @param rsp           the Solr response that results and timing statistics are added to
 * @param searcher      index searcher used for candidate retrieval and stored-document loading
 * @param hashFieldName the hash field name
 * @param maximumHits   maximum number of results kept after re-ranking
 * @param terms         query terms (unused here; kept for the commented-out TermsFilter variants)
 * @param query         hash-based candidate retrieval query
 * @param queryFeature  feature of the query image used for distance-based re-ranking
 * @throws IOException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
private void doSearch(SolrQueryRequest req, SolrQueryResponse rsp, SolrIndexSearcher searcher,
        String hashFieldName, int maximumHits, List<Term> terms, Query query, LireFeature queryFeature)
        throws IOException, IllegalAccessException, InstantiationException {
    // temp feature instance, reused for every candidate document
    LireFeature tmpFeature = queryFeature.getClass().newInstance();
    // Taking the time of search for statistical purposes.
    time = System.currentTimeMillis();

    Filter filter = null;
    // if the request contains a filter:
    if (req.getParams().get("fq") != null) {
        // only filters with [<field>:<value> ]+ are supported
        StringTokenizer st = new StringTokenizer(req.getParams().get("fq"), " ");
        LinkedList<Term> filterTerms = new LinkedList<Term>();
        while (st.hasMoreElements()) {
            String[] tmpToken = st.nextToken().split(":");
            if (tmpToken.length > 1) {
                filterTerms.add(new Term(tmpToken[0], tmpToken[1]));
            }
        }
        if (filterTerms.size() > 0)
            filter = new TermsFilter(filterTerms);
    }

    TopDocs docs; // with query only.
    // Candidate retrieval: hash-based query, optionally restricted by the "fq" filter.
    if (filter == null) {
        docs = searcher.search(query, numberOfCandidateResults);
    } else {
        docs = searcher.search(query, filter, numberOfCandidateResults);
    }
    //        TopDocs docs = searcher.search(query, new TermsFilter(terms), numberOfCandidateResults);   // with TermsFilter and boosting by simple query
    //        TopDocs docs = searcher.search(new ConstantScoreQuery(new TermsFilter(terms)), numberOfCandidateResults); // just with TermsFilter
    time = System.currentTimeMillis() - time;
    rsp.add("RawDocsCount", docs.scoreDocs.length + "");
    rsp.add("RawDocsSearchTime", time + "");
    // re-rank
    time = System.currentTimeMillis();
    // Keeps the current best hits ordered by distance; last() is the worst retained hit.
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    float maxDistance = -1f;
    float tmpScore;

    String featureFieldName = FeatureRegistry.getFeatureFieldName(hashFieldName);
    // iterating and re-ranking the documents.
    BinaryDocValues binaryValues = MultiDocValues.getBinaryValues(searcher.getIndexReader(), featureFieldName);
    BytesRef bytesRef;
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        // using DocValues to retrieve the field values ...
        bytesRef = binaryValues.get(docs.scoreDocs[i].doc);
        tmpFeature.setByteArrayRepresentation(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        // Getting the document from the index.
        // This is the slow step based on the field compression of stored fields.
        tmpScore = queryFeature.getDistance(tmpFeature);
        if (resultScoreDocs.size() < maximumHits) { // todo: There's potential here for a memory saver, think of a clever data structure that can do the trick without creating a new SimpleResult for each result.
            // Below capacity: always keep the hit and refresh the cut-off distance.
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            maxDistance = resultScoreDocs.last().getDistance();
        } else if (tmpScore < maxDistance) {
            //                if it is nearer to the sample than at least one of the current set:
            //                remove the last one ...
            resultScoreDocs.remove(resultScoreDocs.last());
            //                add the new one ...
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            //                and set our new distance border ...
            maxDistance = resultScoreDocs.last().getDistance();
        }
    }
    time = System.currentTimeMillis() - time;
    rsp.add("ReRankSearchTime", time + "");
    // Build the response list in ascending-distance order (TreeSet iteration order).
    LinkedList list = new LinkedList();
    for (Iterator<SimpleResult> it = resultScoreDocs.iterator(); it.hasNext();) {
        SimpleResult result = it.next();
        HashMap m = new HashMap(2);
        m.put("d", result.getDistance());
        // add fields as requested:
        if (req.getParams().get("fl") == null) {
            // No field list requested: return id and (if present) title.
            m.put("id", result.getDocument().get("id"));
            if (result.getDocument().get("title") != null)
                m.put("title", result.getDocument().get("title"));
        } else {
            String fieldsRequested = req.getParams().get("fl");
            // NOTE(review): substring matching — a field literally named "score" would also trigger this.
            if (fieldsRequested.contains("score")) {
                m.put("score", result.getDistance());
            }
            if (fieldsRequested.contains("*")) {
                // all fields
                for (IndexableField field : result.getDocument().getFields()) {
                    String tmpField = field.name();
                    // Multi-valued fields are returned as a value array, single-valued as a string.
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            } else {
                // Explicit field list: split on comma if present, otherwise on whitespace.
                StringTokenizer st;
                if (fieldsRequested.contains(","))
                    st = new StringTokenizer(fieldsRequested, ",");
                else
                    st = new StringTokenizer(fieldsRequested, " ");
                while (st.hasMoreElements()) {
                    String tmpField = st.nextToken();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            }
        }
        list.add(m);
    }
    rsp.add("docs", list);
}

From source file:net.semanticmetadata.lire.solr.FastLireRequestHandler.java

/**
 * Actual search implementation based on (i) hash based retrieval and (ii) feature based re-ranking.
 * Same algorithm as LireRequestHandler.doSearch, but uses the older Lucene DocValues API that
 * fills a caller-supplied, reused {@code BytesRef}.
 *
 * @param req           the Solr request; "fq" (filter) and "fl" (field list) params are honored
 * @param rsp           the Solr response that results and timing statistics are added to
 * @param searcher      index searcher used for candidate retrieval and stored-document loading
 * @param hashFieldName the hash field name
 * @param maximumHits   maximum number of results kept after re-ranking
 * @param terms         query terms (unused here; kept for the commented-out TermsFilter variants)
 * @param query         hash-based candidate retrieval query
 * @param queryFeature  feature of the query image used for distance-based re-ranking
 * @throws java.io.IOException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
private void doSearch(SolrQueryRequest req, SolrQueryResponse rsp, SolrIndexSearcher searcher,
        String hashFieldName, int maximumHits, List<Term> terms, Query query, LireFeature queryFeature)
        throws IOException, IllegalAccessException, InstantiationException {
    // temp feature instance, reused for every candidate document
    LireFeature tmpFeature = queryFeature.getClass().newInstance();
    // Taking the time of search for statistical purposes.
    time = System.currentTimeMillis();

    Filter filter = null;
    // if the request contains a filter:
    if (req.getParams().get("fq") != null) {
        // only filters with [<field>:<value> ]+ are supported
        StringTokenizer st = new StringTokenizer(req.getParams().get("fq"), " ");
        LinkedList<Term> filterTerms = new LinkedList<Term>();
        while (st.hasMoreElements()) {
            String[] tmpToken = st.nextToken().split(":");
            if (tmpToken.length > 1) {
                filterTerms.add(new Term(tmpToken[0], tmpToken[1]));
            }
        }
        if (filterTerms.size() > 0)
            filter = new TermsFilter(filterTerms);
    }

    TopDocs docs; // with query only.
    // Candidate retrieval: hash-based query, optionally restricted by the "fq" filter.
    if (filter == null) {
        docs = searcher.search(query, numberOfCandidateResults);
    } else {
        docs = searcher.search(query, filter, numberOfCandidateResults);
    }
    //        TopDocs docs = searcher.search(query, new TermsFilter(terms), numberOfCandidateResults);   // with TermsFilter and boosting by simple query
    //        TopDocs docs = searcher.search(new ConstantScoreQuery(new TermsFilter(terms)), numberOfCandidateResults); // just with TermsFilter
    time = System.currentTimeMillis() - time;
    rsp.add("RawDocsCount", docs.scoreDocs.length + "");
    rsp.add("RawDocsSearchTime", time + "");
    // re-rank
    time = System.currentTimeMillis();
    // Keeps the current best hits ordered by distance; last() is the worst retained hit.
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    float maxDistance = -1f;
    float tmpScore;

    String featureFieldName = FeatureRegistry.getFeatureFieldName(hashFieldName);
    // iterating and re-ranking the documents.
    BinaryDocValues binaryValues = MultiDocValues.getBinaryValues(searcher.getIndexReader(), featureFieldName);
    // Single BytesRef instance reused across iterations (older DocValues API fills it in place).
    BytesRef bytesRef = new BytesRef();
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        // using DocValues to retrieve the field values ...
        binaryValues.get(docs.scoreDocs[i].doc, bytesRef);
        tmpFeature.setByteArrayRepresentation(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        // Getting the document from the index.
        // This is the slow step based on the field compression of stored fields.
        tmpScore = queryFeature.getDistance(tmpFeature);
        if (resultScoreDocs.size() < maximumHits) { // todo: There's potential here for a memory saver, think of a clever data structure that can do the trick without creating a new SimpleResult for each result.
            // Below capacity: always keep the hit and refresh the cut-off distance.
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            maxDistance = resultScoreDocs.last().getDistance();
        } else if (tmpScore < maxDistance) {
            //                if it is nearer to the sample than at least one of the current set:
            //                remove the last one ...
            resultScoreDocs.remove(resultScoreDocs.last());
            //                add the new one ...
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            //                and set our new distance border ...
            maxDistance = resultScoreDocs.last().getDistance();
        }
    }
    time = System.currentTimeMillis() - time;
    rsp.add("ReRankSearchTime", time + "");
    // Build the response list in ascending-distance order (TreeSet iteration order).
    LinkedList list = new LinkedList();
    for (Iterator<SimpleResult> it = resultScoreDocs.iterator(); it.hasNext();) {
        SimpleResult result = it.next();
        HashMap m = new HashMap(2);
        m.put("d", result.getDistance());
        // add fields as requested:
        if (req.getParams().get("fl") == null) {
            // No field list requested: return id and (if present) title.
            m.put("id", result.getDocument().get("id"));
            if (result.getDocument().get("title") != null)
                m.put("title", result.getDocument().get("title"));
        } else {
            String fieldsRequested = req.getParams().get("fl");
            // NOTE(review): substring matching — a field literally named "score" would also trigger this.
            if (fieldsRequested.contains("score")) {
                m.put("score", result.getDistance());
            }
            if (fieldsRequested.contains("*")) {
                // all fields
                for (IndexableField field : result.getDocument().getFields()) {
                    String tmpField = field.name();
                    // Multi-valued fields are returned as a value array, single-valued as a string.
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            } else {
                // Explicit field list: split on comma if present, otherwise on whitespace.
                StringTokenizer st;
                if (fieldsRequested.contains(","))
                    st = new StringTokenizer(fieldsRequested, ",");
                else
                    st = new StringTokenizer(fieldsRequested, " ");
                while (st.hasMoreElements()) {
                    String tmpField = st.nextToken();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            }
        }
        list.add(m);
    }
    rsp.add("docs", list);
}

From source file:OSFFM_ORC.FederationActionManager.java

/**
 * Persists federated-network segment tables received from a site and records,
 * per outer table row, a freshly generated UUID used as foreign key.
 *
 * @param tables  JSON with an integer "version" and a "table" array of arrays of net entries
 * @param m       Mongo DAO used to persist segment rows and table metadata
 * @param refSite name of the site the tables refer to
 * @param tenant  tenant identifier used as persistence key
 * @param updNet  out-parameter: maps refSite to a snapshot (clone) of the federated
 *                network names encountered
 * @return the (reused) segment-table JSON object holding the last entry written;
 *         empty if parsing or persistence failed
 */
private JSONObject bnaNetSegCreate(JSONObject tables, DBMongo m, String refSite, String tenant,
        HashMap<String, Object> updNet) {

    JSONObject bnaSegTab = new JSONObject();
    Integer version = null;
    UUID uuid = null;
    String fedNet = "";
    TreeSet<String> fednets = new TreeSet<String>();
    try {
        version = tables.getInt("version");
        JSONArray bigArray = (JSONArray) tables.get("table");
        for (int i = 0; i < bigArray.length(); i++) {
            // One fresh UUID per outer row, shared by all its inner entries.
            uuid = UUID.randomUUID();
            JSONArray innerArray = (JSONArray) bigArray.get(i);
            for (int j = 0; j < innerArray.length(); j++) {
                JSONObject objectJson = (JSONObject) innerArray.get(j);
                // NOTE(review): with OpenNebula the fednets inside one inner array can have
                // different names, yet only the last one reaches insertTablesData below — confirm intended.
                fedNet = objectJson.getString("name");
                fednets.add(fedNet);
                bnaSegTab.put("FK", uuid.toString());
                // netEntry example: { "tenant_id": "...", "site_name": "...", "vnid": "...", "name": "provider" }
                bnaSegTab.put("netEntry", objectJson);
                m.insertNetTables(tenant, bnaSegTab.toString(0));
            }
            m.insertTablesData(uuid.toString(), tenant, version, refSite, fedNet);
        }
        // Hand a snapshot to the caller before clearing the working set.
        updNet.put(refSite, fednets.clone());
        for (String net : fednets) {
            System.out.println(net);
        }
        fednets.clear();
    } catch (JSONException ex) {
        System.out.println("-___-' Error: " + ex.getMessage());
    } catch (MDBIException ex) {
        System.out.println("-___-' Error: " + ex.getMessage());
    }

    return bnaSegTab;
}

From source file:org.unitime.timetable.util.SessionRollForward.java

/**
 * Copies all exam periods of the source session into the target session,
 * defaulting missing event start/stop offsets to zero.
 *
 * @param toSession   session that receives the cloned exam periods
 * @param fromSession session whose exam periods are rolled forward
 */
private void rollForwardExamPeriods(Session toSession, Session fromSession) {
    ExamPeriodDAO dao = new ExamPeriodDAO();
    TreeSet sourcePeriods = ExamPeriod.findAll(fromSession.getUniqueId(), (Long) null);
    for (Object element : sourcePeriods) {
        ExamPeriod source = (ExamPeriod) element;
        // Clone the period and re-home it in the target session.
        ExamPeriod copy = (ExamPeriod) source.clone();
        copy.setSession(toSession);
        if (copy.getEventStartOffset() == null)
            copy.setEventStartOffset(0);
        if (copy.getEventStopOffset() == null)
            copy.setEventStopOffset(0);
        dao.save(copy);
    }
}

From source file:guineu.modules.filter.Alignment.RANSACGCGC.RansacGCGCAlignerTask.java

/**
 * Computes the best row-to-row alignment between the given peak list and the
 * aligned (master) peak list, using a RANSAC-fitted polynomial RT correction
 * and greedy best-score-first matching.
 *
 * @param peakList the dataset whose rows should be mapped onto the aligned peak list
 * @return map from rows of {@code peakList} to their best-matching aligned rows;
 *         empty if the aligned list has no rows; {@code null} if scoring fails
 *         (task status is set to ERROR)
 */
private HashMap<PeakListRow, PeakListRow> getAlignmentMap(Dataset peakList) {

    // Create a table of mappings for best scores
    HashMap<PeakListRow, PeakListRow> alignmentMapping = new HashMap<PeakListRow, PeakListRow>();

    if (alignedPeakList.getNumberRows() < 1) {
        return alignmentMapping;
    }

    // Create a sorted set of scores matching
    TreeSet<RowVsRowGCGCScore> scoreSet = new TreeSet<RowVsRowGCGCScore>();

    // RANSAC algorithm: fit an RT-correction polynomial from matched structure pairs.
    List<AlignGCGCStructMol> list = ransacPeakLists(alignedPeakList, peakList);
    PolynomialFunction function = this.getPolynomialFunction(list,
            ((SimpleGCGCDataset) alignedPeakList).getRowsRTRange());

    PeakListRow allRows[] = peakList.getRows().toArray(new PeakListRow[0]);

    // Score every row against all aligned-list candidates within tolerance.
    for (PeakListRow row : allRows) {
        double rt = 0;
        if (!this.useOnlyRTI) {
            // Correct RT1 via the fitted polynomial; fall back to raw RT1 on NaN/-1.
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRT1());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRT1();
                }
            } catch (Exception ee) {
                // best-effort: rt stays 0 if the polynomial cannot be evaluated
            }
        } else {
            // Same correction applied to the retention-time index instead.
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRTI());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRTI();
                }
            } catch (Exception ee) {
                // best-effort: rt stays 0 if the polynomial cannot be evaluated
            }
        }
        PeakListRow candidateRows[] = null;
        if (!this.useOnlyRTI) {
            // Candidates must fall within RT1 (corrected), RT2 and RTI tolerance windows.
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            Range RT1Range = this.rtToleranceAfterRTcorrection.getToleranceRange(rt);
            Range RT2Range = this.rt2Tolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRT2());
            // Get all rows of the aligned peaklist within parameter limits
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RT1Range,
                    RT2Range, RTIRange);
        } else {
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RTIRange);
        }
        for (PeakListRow candidate : candidateRows) {
            RowVsRowGCGCScore score;
            try {
                score = new RowVsRowGCGCScore(row, candidate, rtiTolerance.getTolerance(),
                        rtToleranceAfterRTcorrection.getTolerance(), rt);

                scoreSet.add(score);
                // NOTE(review): only the message of the last score survives the loop — confirm intended.
                errorMessage = score.getErrorMessage();

            } catch (Exception e) {
                e.printStackTrace();
                setStatus(TaskStatus.ERROR);
                return null;
            }
        }
        progress = (double) processedRows++ / (double) totalRows;
    }

    // Iterate scores by descending order
    // (TreeSet order of RowVsRowGCGCScore; greedy: best available pairing wins first.)
    Iterator<RowVsRowGCGCScore> scoreIterator = scoreSet.iterator();
    while (scoreIterator.hasNext()) {

        RowVsRowGCGCScore score = scoreIterator.next();

        // Check if the row is already mapped
        if (alignmentMapping.containsKey(score.getPeakListRow())) {
            continue;
        }

        // Check if the spectra score is unacceptable
        if (score.score == -10) {
            continue;
        }

        // Check if the aligned row is already filled
        if (alignmentMapping.containsValue(score.getAlignedRow())) {
            continue;
        }

        alignmentMapping.put(score.getPeakListRow(), score.getAlignedRow());

    }

    return alignmentMapping;
}

From source file:org.chiba.tools.schemabuilder.AbstractSchemaFormBuilder.java

/**
 * Builds the type tree for every global type definition of the schema:
 * first all complex types, then all simple types. When debug logging is
 * enabled, dumps the resulting hierarchy.
 *
 * @param schema the schema model whose type definitions are processed
 */
private void buildTypeTree(XSModel schema) {
    // Collect all global type definitions once; both passes share the same map.
    XSNamedMap typeComponents = schema.getComponents(XSConstants.TYPE_DEFINITION);
    int typeCount = typeComponents.getLength();

    // First pass: complex types.
    for (int idx = 0; idx < typeCount; idx++) {
        XSTypeDefinition definition = (XSTypeDefinition) typeComponents.item(idx);
        if (definition.getTypeCategory() == XSTypeDefinition.COMPLEX_TYPE) {
            buildTypeTree((XSComplexTypeDefinition) definition,
                    new TreeSet(TypeExtensionSorter.getInstance()));
        }
    }

    // Second pass: simple types.
    for (int idx = 0; idx < typeCount; idx++) {
        XSTypeDefinition definition = (XSTypeDefinition) typeComponents.item(idx);
        if (definition.getTypeCategory() == XSTypeDefinition.SIMPLE_TYPE) {
            buildTypeTree((XSSimpleTypeDefinition) definition,
                    new TreeSet(TypeExtensionSorter.getInstance()));
        }
    }

    // Dump the computed hierarchy for debugging purposes.
    if (LOGGER.isDebugEnabled()) {
        for (Iterator keys = typeTree.keySet().iterator(); keys.hasNext();) {
            String typeName = (String) keys.next();
            TreeSet descendents = (TreeSet) typeTree.get(typeName);
            LOGGER.debug(">>>> for " + typeName + " Descendants=\n ");
            for (Iterator descIter = descendents.iterator(); descIter.hasNext();) {
                LOGGER.debug("      " + ((XSTypeDefinition) descIter.next()).getName());
            }
        }
    }
}

From source file:de.zib.scalaris.examples.wikipedia.bliki.WikiServlet.java

/**
 * Shows a page containing a list of article names, filtered by the page bean's
 * namespace, prefix, from/to range and search string.
 *
 * @param request
 *            the request of the current operation
 * @param response
 *            the response of the current operation
 * @param result
 *            result from reading the page list
 * @param connection
 *            connection to the database
 * @param page
 *            the bean for the page
 *
 * @throws IOException
 * @throws ServletException
 */
private void handleViewSpecialPageList(HttpServletRequest request, HttpServletResponse response,
        ValueResult<List<NormalisedTitle>> result, Connection connection, WikiPageListBean page)
        throws ServletException, IOException {
    if (result.success) {
        // Case-insensitive, sorted presentation order for the title list.
        final TreeSet<String> pageList = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
        MyWikiModel.denormalisePageTitles(result.value, namespace, pageList);
        page.setNotice(getParam_notice(request));
        // Namespace prefix ("<ns>:") prepended to prefix/from/to filters below.
        String nsPrefix = namespace.getNamespaceByNumber(page.getNamespaceId());
        if (!nsPrefix.isEmpty()) {
            nsPrefix += ":";
        }
        final String prefix = nsPrefix + page.getPrefix();
        final String from = page.getFromPage();
        final String fullFrom = nsPrefix + page.getFromPage();
        final String to = page.getToPage();
        final String fullTo = nsPrefix + page.getToPage();
        final String search = page.getSearch().toLowerCase();
        final String searchTitle = MyWikiModel.normaliseName(page.getSearch());
        boolean foundMatch = false;
        if (!prefix.isEmpty() || !from.isEmpty() || !to.isEmpty() || !search.isEmpty()) {
            // only show pages with this prefix:
            // (Iterator.remove is required here — we mutate the set while iterating.)
            for (Iterator<String> it = pageList.iterator(); it.hasNext();) {
                final String cur = it.next();
                // case-insensitive "startsWith" check:
                if (!cur.regionMatches(true, 0, prefix, 0, prefix.length())) {
                    it.remove();
                    // NOTE(review): <= 0 makes the "from" bound exclusive (the from-page itself
                    // is dropped) while "to" is inclusive — confirm intended.
                } else if (!from.isEmpty() && cur.compareToIgnoreCase(fullFrom) <= 0) {
                    it.remove();
                } else if (!to.isEmpty() && cur.compareToIgnoreCase(fullTo) > 0) {
                    it.remove();
                } else if (!search.isEmpty() && !cur.toLowerCase().contains(search)) {
                    it.remove();
                } else if (!search.isEmpty() && cur.equals(searchTitle)) {
                    // Exact (normalised) match for the search term was retained.
                    foundMatch = true;
                }
            }
        }
        page.setPages(pageList);
        page.setFoundFullMatch(foundMatch);
        page.setWikiTitle(siteinfo.getSitename());
        page.setWikiNamespace(namespace);

        forwardToPageJsp(request, response, connection, page, "pageSpecial_pagelist.jsp");
    } else {
        // Read failed: surface an error and fall back to an empty page.
        if (result.connect_failed) {
            setParam_error(request, "ERROR: DB connection failed");
        } else {
            setParam_error(request, "ERROR: page list unavailable");
            addToParam_notice(request, "error: unknown error getting page list for " + page.getTitle()
                    + ": <pre>" + result.message + "</pre>");
        }
        showEmptyPage(request, response, connection, page);
        return;
    }
    page.setError(getParam_error(request));
    page.setTitle(page.getTitle());
}

From source file:gemlite.shell.admin.dao.AdminDao.java

/**
 * Builds a human-readable report of primary/redundant bucket distribution for the
 * given partitioned region by invoking the remote "PrService" admin function on
 * all servers and aggregating the returned per-bucket records.
 *
 * @param regionName region path to inspect
 * @return formatted report: per host/node bucket tables, summary counts, and
 *         the consistency check appended by {@code checkPr}
 */
public String prB(String regionName) {
    Map param = new HashMap();

    param.put("beanName", "PrService");
    Map args = new HashMap();
    args.put("REGIONPATH", regionName);
    param.put("userArgs", args);
    Execution execution = FunctionService.onServers(clientPool).withArgs(param);
    ResultCollector rc = execution.execute("REMOTE_ADMIN_FUNCTION");
    ArrayList rs = (ArrayList) rc.getResult();
    StringBuilder sb = new StringBuilder();
    int pNum = 0, rNum = 0, tNum = 0;
    // Sorted set of host+node identifiers, for a stable report ordering.
    TreeSet<String> ipNodeSet = new TreeSet<String>();
    TreeSet<String> ipSet = new TreeSet<String>(); // distinct host addresses
    HashMap<String, Set<String>> nodeMap = new HashMap<String, Set<String>>(); // host -> nodes on that host
    // key: (primary|redundant prefix) + host + node; value: bucketId -> "id\tbytes\tsize"
    HashMap<String, HashMap<Integer, String>> data = new HashMap<String, HashMap<Integer, String>>();
    if (rs != null) {
        for (Object obj : rs) {
            ArrayList list = (ArrayList) obj;
            for (Object o : list) {
                if (!(o instanceof Map)) {
                    // Non-map entries are plain status/error strings from the servers.
                    System.out.println(o.toString());
                    continue;
                }
                HashMap map = (HashMap) o;
                String host = (String) map.get("host");
                String node = (String) map.get("node");
                Integer BucketId = (Integer) map.get("BucketId");
                ipSet.add(host); // Set.add is a no-op on duplicates; no contains() pre-check needed
                Set<String> nodeSet = nodeMap.get(host);
                if (nodeSet == null) {
                    nodeSet = new TreeSet<String>();
                    nodeMap.put(host, nodeSet);
                }
                nodeSet.add(node);
                String hostAndNode = host + node;
                String singleHostNode = hostAndNode;
                tNum = (Integer) map.get("TotalNumBuckets");
                ipNodeSet.add(hostAndNode);
                // Distinguish primary from redundant copies and count each kind.
                if ("primary".equals(map.get("type"))) {
                    singleHostNode = primary + singleHostNode;
                    pNum++;
                } else {
                    singleHostNode = redundant + singleHostNode;
                    rNum++;
                }
                HashMap<Integer, String> buckets = data.get(singleHostNode);
                if (buckets == null) {
                    buckets = new HashMap<Integer, String>();
                    data.put(singleHostNode, buckets);
                }
                buckets.put(BucketId, BucketId + "\t" + map.get("Bytes") + "\t" + map.get("Size"));
            }
        }
    }

    // Emit, per host+node, first the primary then the redundant bucket table.
    Iterator<String> it = ipNodeSet.iterator();
    int i = 0;
    while (it.hasNext()) {
        i++;
        String host = it.next();
        String p = primary + host;
        sb.append(i + ". " + p).append("\n");
        sb.append(paraseSingleNode(data, p));
        String r = redundant + host;
        sb.append(i + ". " + r).append("\n");
        sb.append(paraseSingleNode(data, r));
    }
    // Summary totals.
    sb.append("Primary Bucket Count:" + pNum).append("\n");
    sb.append("Redundant Bucket Count:" + rNum).append("\n");
    sb.append("total-num-buckets (max):" + tNum).append("\n");

    // Verify bucket distribution consistency across hosts/nodes.
    checkPr(ipSet, nodeMap, data, sb);
    return sb.toString();
}

From source file:org.unitime.timetable.solver.exam.ui.ExamInfoModel.java

/**
 * Finds all locations that can host this exam in the given period, each scored by the
 * combined room / building / room-group / room-feature preferences.
 * <p>
 * Rooms already occupied in this period are tracked via {@code locationTable}
 * (location id &rarr; ids of exams assigned there); assignments belonging to the exam
 * being edited, to conflicts of the pending change, and to initial assignments of the
 * pending change are first removed from that table so they do not block reassignment.
 * A room occupied by an exam that cannot share the room either disqualifies the room
 * (when {@code allowConflicts} is false or the conflicting exam is part of the pending
 * change) or adds a 1000-point penalty to its score.
 *
 * @param period         the examination period to search rooms for
 * @param minRoomSize    minimum acceptable capacity, or negative to ignore
 * @param maxRoomSize    maximum acceptable capacity, or negative to ignore
 * @param filter         room-label filter passed to {@code match}; may be null/empty
 * @param allowConflicts when true, rooms with non-sharable occupants are still returned
 *                       (with a penalty) instead of being skipped
 * @return rooms ordered as produced by iteration over the sorted location set, each
 *         wrapped in an {@link ExamRoomInfo} carrying its preference score
 */
protected Vector<ExamRoomInfo> findRooms(ExamPeriod period, int minRoomSize, int maxRoomSize, String filter,
        boolean allowConflicts) {
    Vector<ExamRoomInfo> rooms = new Vector<ExamRoomInfo>();
    // Once a Required preference of a given kind is seen, only rooms satisfying it are kept.
    boolean reqRoom = false;
    boolean reqBldg = false;
    boolean reqGroup = false;

    Exam exam = getExam().getExam(new ExamDAO().getSession());
    Set<Long> canShareRoom = getCanShareRoomExams(getExam().getExamId());

    // Preference sets come from the data layer as raw Sets; elements are cast per kind below.
    Set groupPrefs = exam.getPreferences(RoomGroupPref.class);
    Set roomPrefs = exam.getPreferences(RoomPref.class);
    Set bldgPrefs = exam.getPreferences(BuildingPref.class);
    Set featurePrefs = exam.getPreferences(RoomFeaturePref.class);

    TreeSet locations = findAllExamLocations(period.getSession().getUniqueId(),
            period.getExamType().getUniqueId());
    Hashtable<Long, Set<Long>> locationTable = Location.findExamLocationTable(period.getUniqueId());

    // Free up rooms held by this exam's current assignment in the same period.
    if (getExamAssignment() != null) {
        if (getExamAssignment().getPeriod().equals(period) && getExamAssignment().getRooms() != null)
            for (ExamRoomInfo room : getExamAssignment().getRooms()) {
                Set<Long> exams = locationTable.get(room.getLocationId());
                if (exams != null)
                    exams.remove(getExam().getExamId());
            }
    }
    if (iChange != null) {
        // Exams listed as conflicts of the pending change will be unassigned — free their rooms.
        for (ExamAssignment conflict : iChange.getConflicts()) {
            if (conflict.getPeriod().equals(period) && conflict.getRooms() != null)
                for (ExamRoomInfo room : conflict.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams != null)
                        exams.remove(conflict.getExamId());
                }
        }
        // Initial (pre-change) assignments are being vacated — free those rooms too.
        for (ExamAssignment current : iChange.getAssignments()) {
            ExamAssignment initial = iChange.getInitial(current);
            if (initial != null && initial.getPeriod().equals(period) && initial.getRooms() != null)
                for (ExamRoomInfo room : initial.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams != null)
                        exams.remove(initial.getExamId());
                }
        }
        // New assignments of other exams in the pending change occupy rooms — register them.
        for (ExamAssignment current : iChange.getAssignments()) {
            if (!iExam.getExamId().equals(current.getExamId()) && current.getPeriod().equals(period)
                    && current.getRooms() != null)
                for (ExamRoomInfo room : current.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams == null) {
                        exams = new HashSet<Long>();
                        locationTable.put(room.getLocationId(), exams);
                    }
                    exams.add(current.getExamId());
                }
        }
    }

    rooms: for (Iterator i1 = locations.iterator(); i1.hasNext();) {
        Location room = (Location) i1.next();

        // Strongly-discouraged period preference hides the room unless an explicit
        // room preference overrides it below.
        boolean shouldNotBeUsed = PreferenceLevel.sStronglyDiscouraged
                .equals(room.getExamPreference(period).getPrefProlog());

        boolean add = true;

        PreferenceCombination pref = new SumPreferenceCombination();

        // --- group preference ----------
        PreferenceCombination groupPref = PreferenceCombination.getDefault();
        for (Iterator i2 = groupPrefs.iterator(); i2.hasNext();) {
            RoomGroupPref p = (RoomGroupPref) i2.next();
            if (p.getRoomGroup().getRooms().contains(room))
                groupPref.addPreferenceProlog(p.getPrefLevel().getPrefProlog());
        }

        if (groupPref.getPreferenceProlog().equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqGroup && !groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))
            add = false;

        // First Required group seen: discard everything collected so far and
        // keep only rooms satisfying the requirement from here on.
        if (!reqGroup && (groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))) {
            reqGroup = true;
            rooms.clear();
        }

        if (!groupPref.getPreferenceProlog().equals(PreferenceLevel.sProhibited)
                && !groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(groupPref.getPreferenceProlog());

        // --- room preference ------------
        String roomPref = room.getExamPreference(period).getPrefProlog();

        for (Iterator i2 = roomPrefs.iterator(); i2.hasNext();) {
            RoomPref p = (RoomPref) i2.next();
            if (room.equals(p.getRoom())) {
                // Explicit room preference overrides the period-level one and
                // cancels the strongly-discouraged suppression.
                roomPref = p.getPrefLevel().getPrefProlog();
                shouldNotBeUsed = false;
                break;
            }
        }

        if (roomPref != null && roomPref.equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqRoom && (roomPref == null || !roomPref.equals(PreferenceLevel.sRequired)))
            add = false;

        if (!reqRoom && (roomPref != null && roomPref.equals(PreferenceLevel.sRequired))) {
            reqRoom = true;
            rooms.clear();
        }

        if (roomPref != null && !roomPref.equals(PreferenceLevel.sProhibited)
                && !roomPref.equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(roomPref);

        // --- building preference ------------
        // Only Rooms (not other location types) belong to a building.
        Building bldg = (room instanceof Room ? ((Room) room).getBuilding() : null);

        String bldgPref = null;
        for (Iterator i2 = bldgPrefs.iterator(); i2.hasNext();) {
            BuildingPref p = (BuildingPref) i2.next();
            if (bldg != null && bldg.equals(p.getBuilding())) {
                bldgPref = p.getPrefLevel().getPrefProlog();
                break;
            }
        }

        if (bldgPref != null && bldgPref.equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqBldg && (bldgPref == null || !bldgPref.equals(PreferenceLevel.sRequired)))
            add = false;

        if (!reqBldg && (bldgPref != null && bldgPref.equals(PreferenceLevel.sRequired))) {
            reqBldg = true;
            rooms.clear();
        }

        if (bldgPref != null && !bldgPref.equals(PreferenceLevel.sProhibited)
                && !bldgPref.equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(bldgPref);

        // --- room features preference --------  
        // Required features must be present, prohibited features must be absent;
        // the rest contribute a min/max-combined score.
        boolean acceptableFeatures = true;
        PreferenceCombination featurePref = new MinMaxPreferenceCombination();
        for (Iterator i2 = featurePrefs.iterator(); i2.hasNext();) {
            RoomFeaturePref roomFeaturePref = (RoomFeaturePref) i2.next();
            RoomFeature feature = roomFeaturePref.getRoomFeature();
            String p = roomFeaturePref.getPrefLevel().getPrefProlog();

            boolean hasFeature = feature.getRooms().contains(room);
            if (p.equals(PreferenceLevel.sProhibited) && hasFeature) {
                acceptableFeatures = false;
            }
            if (p.equals(PreferenceLevel.sRequired) && !hasFeature) {
                acceptableFeatures = false;
            }
            if (p != null && hasFeature && !p.equals(PreferenceLevel.sProhibited)
                    && !p.equals(PreferenceLevel.sRequired))
                featurePref.addPreferenceProlog(p);
        }
        pref.addPreferenceInt(featurePref.getPreferenceInt());

        if (!acceptableFeatures)
            add = false;

        if (!add || shouldNotBeUsed)
            continue;

        // --- room sharing / conflicts --------
        Set<Long> exams = locationTable.get(room.getUniqueId());
        boolean roomConflict = false;
        if (exams != null && !exams.isEmpty()) {
            for (Long other : exams) {
                if (!canShareRoom.contains(other)) {
                    roomConflict = true;
                    if (!allowConflicts)
                        continue rooms;
                    // Never conflict with an exam that is part of the pending change.
                    if (iChange != null && iChange.getCurrent(other) != null)
                        continue rooms;
                }
            }
        }

        // Capacity check uses exam seating capacity or normal capacity per seating type.
        int cap = (getExam().getSeatingType() == Exam.sSeatingTypeExam ? room.getExamCapacity()
                : room.getCapacity());
        if (minRoomSize >= 0 && cap < minRoomSize)
            continue;
        if (maxRoomSize >= 0 && cap > maxRoomSize)
            continue;

        if (PreferenceLevel.sProhibited.equals(room.getExamPreference(period).getPrefProlog()))
            continue;

        if (!match(room.getLabel(), filter))
            continue;

        // External room availability service (e.g. event scheduling) may block the room.
        if (RoomAvailability.getInstance() != null) {
            Collection<TimeBlock> times = RoomAvailability.getInstance().getRoomAvailability(room.getUniqueId(),
                    period.getStartTime(), period.getEndTime(),
                    period.getExamType().getType() == ExamType.sExamTypeFinal
                            ? RoomAvailabilityInterface.sFinalExamType
                            : RoomAvailabilityInterface.sMidtermExamType);
            if (times != null)
                for (TimeBlock time : times) {
                    if (period.overlap(time)) {
                        // Fixed typo in log message ("avaiable" -> "available").
                        sLog.info("Room " + room.getLabel() + " is not available due to " + time);
                        continue rooms;
                    }
                }
        }

        // Sharable-but-conflicting rooms carry a heavy 1000-point penalty.
        rooms.add(new ExamRoomInfo(room, (roomConflict ? 1000 : 0) + pref.getPreferenceInt()));
    }

    return rooms;
}