Example usage for java.util.TreeSet.size()

List of usage examples for java.util.TreeSet.size()

Introduction

On this page you can find example usage for java.util.TreeSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
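
Before looking at the real-world examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed) showing the contract of size(): it reports the set's cardinality, so duplicate insertions do not increase the count.

import java.util.TreeSet;

public class TreeSetSizeDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<>();
        set.add("alpha");
        set.add("beta");
        set.add("alpha");                    // duplicate, silently ignored by the set

        System.out.println(set.size());      // 2 -- cardinality, not the number of add() calls

        set.remove("beta");
        System.out.println(set.size());      // 1
        System.out.println(set.isEmpty());   // false
    }
}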

Usage

From source file:net.firejack.platform.generate.tools.Render.java

/**
 * @param params the service parameters to render
 * @return the rendered endpoint parameter list, or an empty string if there are no parameters
 */
public String renderEndpointParams(TreeSet<ServiceParam> params) {
    if (params == null || params.isEmpty())
        return "";
    StringBuilder builder = new StringBuilder();

    int i = 0;
    for (ServiceParam param : params) {
        String name = param.getName();
        ParameterTransmissionType location = param.getLocation();
        if (location == null) {
            builder.append("ServiceRequest<");
        } else if (location.equals(PATH)) {
            builder.append("@PathParam(\"").append(name).append("\") ");
        } else if (location.equals(ParameterTransmissionType.QUERY)) {
            builder.append("@QueryParam(\"").append(name).append("\") ");
        }

        builder.append(renderType(param));
        if (location == null)
            builder.append(">");
        builder.append(" ").append(name);
        if (i < params.size() - 1) {
            builder.append(",");
        }
        i++;
    }
    return builder.toString();
}
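
The method above keeps a manual counter and compares it with params.size() - 1 because a TreeSet offers no index-based access; that is how the trailing comma is skipped on the last element. A stripped-down sketch of the same size()-based join pattern, using plain strings instead of the ServiceParam type above:

TreeSet<String> params = new TreeSet<>(java.util.Arrays.asList("id", "name", "type"));
StringBuilder builder = new StringBuilder();
int i = 0;
for (String param : params) {
    builder.append(param);
    if (i < params.size() - 1) {    // every element except the last is followed by a separator
        builder.append(",");
    }
    i++;
}
// builder.toString() -> "id,name,type"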

From source file:com.hichinaschool.flashcards.anki.CardEditor.java

private void actualizeTagDialog(StyledDialog ad) {
    TreeSet<String> tags = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    for (String tag : mCol.getTags().all()) {
        tags.add(tag);
    }
    tags.addAll(selectedTags);
    int len = tags.size();
    allTags = new String[len];
    boolean[] checked = new boolean[len];
    int i = 0;
    for (String t : tags) {
        allTags[i++] = t;
        if (selectedTags.contains(t)) {
            checked[i - 1] = true;
        }
    }
    ad.setMultiChoiceItems(allTags, checked, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface arg0, int which) {
            String tag = allTags[which];
            if (selectedTags.contains(tag)) {
                // Log.i(AnkiDroidApp.TAG, "unchecked tag: " + tag);
                selectedTags.remove(tag);
            } else {
                // Log.i(AnkiDroidApp.TAG, "checked tag: " + tag);
                selectedTags.add(tag);
            }
        }
    });
}
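
Here tags.size() is read once to pre-size the allTags and checked arrays before the sorted, case-insensitively de-duplicated tags are copied into them. A reduced sketch of that allocate-then-fill pattern, without the Anki dialog classes:

TreeSet<String> tags = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
java.util.Collections.addAll(tags, "verbs", "Nouns", "VERBS");   // "VERBS" collapses into "verbs"

int len = tags.size();                  // 2: fixes the array length up front
String[] allTags = new String[len];
boolean[] checked = new boolean[len];   // every slot defaults to false

int i = 0;
for (String t : tags) {
    allTags[i++] = t;                   // iteration order is the set's (case-insensitive) sort order
}
// allTags -> ["Nouns", "verbs"]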

From source file:net.semanticmetadata.lire.solr.FastLireRequestHandler.java

/**
 * Actual search implementation based on (i) hash based retrieval and (ii) feature based re-ranking.
 *
 * @param req           the Solr query request
 * @param rsp           the Solr query response to fill
 * @param searcher      the index searcher
 * @param hashFieldName the hash field name
 * @param maximumHits   the maximum number of results to keep while re-ranking
 * @param terms         the query terms
 * @param query         the query used to retrieve candidates
 * @param queryFeature  the query image feature used for re-ranking
 * @throws java.io.IOException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
private void doSearch(SolrQueryRequest req, SolrQueryResponse rsp, SolrIndexSearcher searcher,
        String hashFieldName, int maximumHits, List<Term> terms, Query query, LireFeature queryFeature)
        throws IOException, IllegalAccessException, InstantiationException {
    // temp feature instance
    LireFeature tmpFeature = queryFeature.getClass().newInstance();
    // Taking the time of search for statistical purposes.
    time = System.currentTimeMillis();

    Filter filter = null;
    // if the request contains a filter:
    if (req.getParams().get("fq") != null) {
        // only filters with [<field>:<value> ]+ are supported
        StringTokenizer st = new StringTokenizer(req.getParams().get("fq"), " ");
        LinkedList<Term> filterTerms = new LinkedList<Term>();
        while (st.hasMoreElements()) {
            String[] tmpToken = st.nextToken().split(":");
            if (tmpToken.length > 1) {
                filterTerms.add(new Term(tmpToken[0], tmpToken[1]));
            }
        }
        if (filterTerms.size() > 0)
            filter = new TermsFilter(filterTerms);
    }

    TopDocs docs; // with query only.
    if (filter == null) {
        docs = searcher.search(query, numberOfCandidateResults);
    } else {
        docs = searcher.search(query, filter, numberOfCandidateResults);
    }
    //        TopDocs docs = searcher.search(query, new TermsFilter(terms), numberOfCandidateResults);   // with TermsFilter and boosting by simple query
    //        TopDocs docs = searcher.search(new ConstantScoreQuery(new TermsFilter(terms)), numberOfCandidateResults); // just with TermsFilter
    time = System.currentTimeMillis() - time;
    rsp.add("RawDocsCount", docs.scoreDocs.length + "");
    rsp.add("RawDocsSearchTime", time + "");
    // re-rank
    time = System.currentTimeMillis();
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    float maxDistance = -1f;
    float tmpScore;

    String featureFieldName = FeatureRegistry.getFeatureFieldName(hashFieldName);
    // iterating and re-ranking the documents.
    BinaryDocValues binaryValues = MultiDocValues.getBinaryValues(searcher.getIndexReader(), featureFieldName);
    BytesRef bytesRef = new BytesRef();
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        // using DocValues to retrieve the field values ...
        binaryValues.get(docs.scoreDocs[i].doc, bytesRef);
        tmpFeature.setByteArrayRepresentation(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        // Getting the document from the index.
        // This is the slow step based on the field compression of stored fields.
        //            tmpFeature.setByteArrayRepresentation(d.getBinaryValue(name).bytes, d.getBinaryValue(name).offset, d.getBinaryValue(name).length);
        tmpScore = queryFeature.getDistance(tmpFeature);
        if (resultScoreDocs.size() < maximumHits) { // todo: There's potential here for a memory saver, think of a clever data structure that can do the trick without creating a new SimpleResult for each result.
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            maxDistance = resultScoreDocs.last().getDistance();
        } else if (tmpScore < maxDistance) {
            //                if it is nearer to the sample than at least one of the current set:
            //                remove the last one ...
            resultScoreDocs.remove(resultScoreDocs.last());
            //                add the new one ...
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            //                and set our new distance border ...
            maxDistance = resultScoreDocs.last().getDistance();
        }
    }
    //        System.out.println("** Creating response.");
    time = System.currentTimeMillis() - time;
    rsp.add("ReRankSearchTime", time + "");
    LinkedList list = new LinkedList();
    for (Iterator<SimpleResult> it = resultScoreDocs.iterator(); it.hasNext();) {
        SimpleResult result = it.next();
        HashMap m = new HashMap(2);
        m.put("d", result.getDistance());
        // add fields as requested:
        if (req.getParams().get("fl") == null) {
            m.put("id", result.getDocument().get("id"));
            if (result.getDocument().get("title") != null)
                m.put("title", result.getDocument().get("title"));
        } else {
            String fieldsRequested = req.getParams().get("fl");
            if (fieldsRequested.contains("score")) {
                m.put("score", result.getDistance());
            }
            if (fieldsRequested.contains("*")) {
                // all fields
                for (IndexableField field : result.getDocument().getFields()) {
                    String tmpField = field.name();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            } else {
                StringTokenizer st;
                if (fieldsRequested.contains(","))
                    st = new StringTokenizer(fieldsRequested, ",");
                else
                    st = new StringTokenizer(fieldsRequested, " ");
                while (st.hasMoreElements()) {
                    String tmpField = st.nextToken();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            }
        }
        //            m.put(field, result.getDocument().get(field));
        //            m.put(field.replace("_ha", "_hi"), result.getDocument().getBinaryValue(field));
        list.add(m);
    }
    rsp.add("docs", list);
    // rsp.add("Test-name", "Test-val");
}
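
The re-ranking loop above is a bounded top-k selection: while resultScoreDocs.size() is still below maximumHits every candidate is admitted, and after that a candidate only gets in if it beats the current worst element (last() in the sorted set), which is then evicted, so the set never grows past the limit. A generic sketch of the pattern with plain scores; note that a TreeSet silently drops duplicate values, which the handler above sidesteps by storing full SimpleResult objects rather than raw scores:

// Keep only the k smallest scores seen so far; relies on the TreeSet staying sorted.
TreeSet<Double> topK = new TreeSet<>();
int k = 3;
for (double score : new double[] { 0.9, 0.2, 0.7, 0.4, 0.8, 0.1 }) {
    if (topK.size() < k) {
        topK.add(score);              // still filling up to the limit
    } else if (score < topK.last()) {
        topK.remove(topK.last());     // evict the current worst ...
        topK.add(score);              // ... and admit the better candidate
    }
}
// topK -> [0.1, 0.2, 0.4]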

From source file:org.dllearner.scripts.NestedCrossValidation.java

private void validate(File confFile, int outerFolds, int innerFolds, String parameter, double startValue,
        double endValue, double stepsize, boolean verbose) throws IOException, ComponentInitException {
    CLI start = new CLI(confFile);
    start.init();
    AbstractLearningProblem lp = start.getLearningProblem();
    if (!(lp instanceof PosNegLP)) {
        System.out.println("Positive only learning not supported yet.");
        System.exit(0);
    }

    // get examples and shuffle them
    LinkedList<Individual> posExamples = new LinkedList<Individual>(((PosNegLP) lp).getPositiveExamples());
    Collections.shuffle(posExamples, new Random(1));
    LinkedList<Individual> negExamples = new LinkedList<Individual>(((PosNegLP) lp).getNegativeExamples());
    Collections.shuffle(negExamples, new Random(2));

    AbstractReasonerComponent rc = start.getReasonerComponent();
    rc.init();
    String baseURI = rc.getBaseURI();

    List<TrainTestList> posLists = getFolds(posExamples, outerFolds);
    List<TrainTestList> negLists = getFolds(negExamples, outerFolds);

    // overall statistics
    Stat accOverall = new Stat();
    Stat fOverall = new Stat();
    Stat recallOverall = new Stat();
    Stat precisionOverall = new Stat();

    for (int currOuterFold = 0; currOuterFold < outerFolds; currOuterFold++) {

        logger.info("Outer fold " + currOuterFold);
        TrainTestList posList = posLists.get(currOuterFold);
        TrainTestList negList = negLists.get(currOuterFold);

        // measure relevant criterion (accuracy, F-measure) over different parameter values
        Map<Double, Stat> paraStats = new HashMap<Double, Stat>();

        for (double currParaValue = startValue; currParaValue <= endValue; currParaValue += stepsize) {

            logger.info("  Parameter value " + currParaValue + ":");
            // split train folds again (computation of inner folds for each parameter
            // value is redundant, but not a big problem)
            List<Individual> trainPosList = posList.getTrainList();
            List<TrainTestList> innerPosLists = getFolds(trainPosList, innerFolds);
            List<Individual> trainNegList = negList.getTrainList();
            List<TrainTestList> innerNegLists = getFolds(trainNegList, innerFolds);

            // measure relevant criterion for parameter (by default accuracy,
            // can also be F measure)
            Stat paraCriterionStat = new Stat();

            for (int currInnerFold = 0; currInnerFold < innerFolds; currInnerFold++) {

                logger.info("    Inner fold " + currInnerFold + ":");
                // get positive & negative examples for training run
                Set<Individual> posEx = new TreeSet<Individual>(
                        innerPosLists.get(currInnerFold).getTrainList());
                Set<Individual> negEx = new TreeSet<Individual>(
                        innerNegLists.get(currInnerFold).getTrainList());

                // read conf file and exchange options for pos/neg examples 
                // and parameter to optimise
                start = new CLI(confFile);
                start.init();
                AbstractLearningProblem lpIn = start.getLearningProblem();
                ((PosNegLP) lpIn).setPositiveExamples(posEx);
                ((PosNegLP) lpIn).setNegativeExamples(negEx);
                AbstractCELA laIn = start.getLearningAlgorithm();
                try {
                    PropertyUtils.setSimpleProperty(laIn, parameter, currParaValue);
                } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                    e.printStackTrace();
                }

                lpIn.init();
                laIn.init();
                laIn.start();

                // evaluate learned expression
                Description concept = laIn.getCurrentlyBestDescription();

                TreeSet<Individual> posTest = new TreeSet<Individual>(
                        innerPosLists.get(currInnerFold).getTestList());
                TreeSet<Individual> negTest = new TreeSet<Individual>(
                        innerNegLists.get(currInnerFold).getTestList());

                // true positive
                Set<Individual> posCorrect = rc.hasType(concept, posTest);
                // false negative
                Set<Individual> posError = Helper.difference(posTest, posCorrect);
                // false positive
                Set<Individual> negError = rc.hasType(concept, negTest);
                // true negative
                Set<Individual> negCorrect = Helper.difference(negTest, negError);

                //               double posErrorRate = 100*(posError.size()/posTest.size());
                //               double negErrorRate = 100*(negError.size()/posTest.size());

                double accuracy = 100 * ((double) (posCorrect.size() + negCorrect.size())
                        / (posTest.size() + negTest.size()));
                // guard against division by zero when a fold has no (predicted) positives
                double precision = (posCorrect.size() + negError.size()) == 0 ? 0
                        : 100 * (double) posCorrect.size() / (posCorrect.size() + negError.size());
                double recall = (posCorrect.size() + posError.size()) == 0 ? 0
                        : 100 * (double) posCorrect.size() / (posCorrect.size() + posError.size());
                double fmeasure = (precision + recall) == 0 ? 0
                        : 2 * (precision * recall) / (precision + recall);

                paraCriterionStat.addNumber(accuracy);

                logger.info("      hypothesis: " + concept.toManchesterSyntaxString(baseURI, null));
                logger.info("      accuracy: " + df.format(accuracy) + "%");
                logger.info("      precision: " + df.format(precision) + "%");
                logger.info("      recall: " + df.format(recall) + "%");
                logger.info("      F measure: " + df.format(fmeasure) + "%");

                if (verbose) {
                    logger.info("      false positives (neg. examples classified as pos.): "
                            + formatIndividualSet(posError, baseURI));
                    logger.info("      false negatives (pos. examples classified as neg.): "
                            + formatIndividualSet(negError, baseURI));
                }
            }

            paraStats.put(currParaValue, paraCriterionStat);
            Stat globalParaStat = globalParaStats.get(currParaValue);
            if (globalParaStat == null) {
                globalParaStat = new Stat();
                globalParaStats.put(currParaValue, globalParaStat);
            }
            globalParaStat.add(paraCriterionStat);
        }

        // decide for the best parameter
        logger.info("    Summary over parameter values:");
        double bestPara = startValue;
        double bestValue = Double.NEGATIVE_INFINITY;
        for (Entry<Double, Stat> entry : paraStats.entrySet()) {
            double para = entry.getKey();
            Stat stat = entry.getValue();
            logger.info("      value " + para + ": " + stat.prettyPrint("%"));
            if (stat.getMean() > bestValue) {
                bestPara = para;
                bestValue = stat.getMean();
            }
        }
        logger.info("      selected " + bestPara + " as best parameter value (criterion value "
                + df.format(bestValue) + "%)");
        logger.info("    Learn on Outer fold:");

        // start a learning process with this parameter and evaluate it on the outer fold
        start = new CLI(confFile);
        start.init();
        AbstractLearningProblem lpOut = start.getLearningProblem();
        ((PosNegLP) lpOut)
                .setPositiveExamples(new TreeSet<Individual>(posLists.get(currOuterFold).getTrainList()));
        ((PosNegLP) lpOut)
                .setNegativeExamples(new TreeSet<Individual>(negLists.get(currOuterFold).getTrainList()));
        AbstractCELA laOut = start.getLearningAlgorithm();
        try {
            PropertyUtils.setSimpleProperty(laOut, parameter, bestPara);
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
            e.printStackTrace();
        }

        lpOut.init();
        laOut.init();
        laOut.start();

        // evaluate learned expression
        Description concept = laOut.getCurrentlyBestDescription();

        TreeSet<Individual> posTest = new TreeSet<Individual>(posLists.get(currOuterFold).getTestList());
        TreeSet<Individual> negTest = new TreeSet<Individual>(negLists.get(currOuterFold).getTestList());

        AbstractReasonerComponent rs = start.getReasonerComponent();
        // true positive
        Set<Individual> posCorrect = rs.hasType(concept, posTest);
        // false negative
        Set<Individual> posError = Helper.difference(posTest, posCorrect);
        // false positive
        Set<Individual> negError = rs.hasType(concept, negTest);
        // true negative
        Set<Individual> negCorrect = Helper.difference(negTest, negError);

        double accuracy = 100
                * ((double) (posCorrect.size() + negCorrect.size()) / (posTest.size() + negTest.size()));
        double precision = 100 * (double) posCorrect.size() / (posCorrect.size() + negError.size());
        double recall = 100 * (double) posCorrect.size() / (posCorrect.size() + posError.size());
        double fmeasure = 2 * (precision * recall) / (precision + recall);

        logger.info("      hypothesis: " + concept.toManchesterSyntaxString(baseURI, null));
        logger.info("      accuracy: " + df.format(accuracy) + "%");
        logger.info("      precision: " + df.format(precision) + "%");
        logger.info("      recall: " + df.format(recall) + "%");
        logger.info("      F measure: " + df.format(fmeasure) + "%");

        if (verbose) {
            logger.info("      false positives (neg. examples classified as pos.): "
                    + formatIndividualSet(posError, baseURI));
            logger.info("      false negatives (pos. examples classified as neg.): "
                    + formatIndividualSet(negError, baseURI));
        }

        // update overall statistics
        accOverall.addNumber(accuracy);
        fOverall.addNumber(fmeasure);
        recallOverall.addNumber(recall);
        precisionOverall.addNumber(precision);

        // free memory
        rs.releaseKB();
    }

    globalAcc.add(accOverall);
    globalF.add(fOverall);
    globalPrecision.add(precisionOverall);
    globalRecall.add(recallOverall);

    // overall statistics
    logger.info("*******************");
    logger.info("* Overall Results *");
    logger.info("*******************");
    logger.info("accuracy: " + accOverall.prettyPrint("%"));
    logger.info("F measure: " + fOverall.prettyPrint("%"));
    logger.info("precision: " + precisionOverall.prettyPrint("%"));
    logger.info("recall: " + recallOverall.prettyPrint("%"));
}
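
In both the inner and outer folds above, accuracy, precision and recall are computed purely from the size() of TreeSet-based example sets (true/false positives and negatives). A toy sketch of that counting, with plain strings standing in for Individual instances:

TreeSet<String> posTest = new TreeSet<>(java.util.Arrays.asList("a", "b", "c", "d"));
TreeSet<String> negTest = new TreeSet<>(java.util.Arrays.asList("e", "f"));
TreeSet<String> posCorrect = new TreeSet<>(java.util.Arrays.asList("a", "b", "c"));  // true positives
TreeSet<String> negError = new TreeSet<>(java.util.Arrays.asList("e"));              // false positives

TreeSet<String> negCorrect = new TreeSet<>(negTest);
negCorrect.removeAll(negError);                        // true negatives: {f}

double accuracy = 100.0 * (posCorrect.size() + negCorrect.size())
        / (posTest.size() + negTest.size());           // 100 * (3 + 1) / (4 + 2) = 66.7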

From source file:org.apache.roller.weblogger.ui.rendering.model.SearchResultsFeedModel.java

private void convertHitsToEntries(Hits hits) throws WebloggerException {

    // determine offset
    this.offset = feedRequest.getPage() * this.entryCount;
    if (this.offset >= hits.length()) {
        this.offset = 0;
    }

    // determine limit
    this.limit = this.entryCount;
    if (this.offset + this.limit > hits.length()) {
        this.limit = hits.length() - this.offset;
    }

    try {
        TreeSet categories = new TreeSet();
        Weblogger roller = WebloggerFactory.getWeblogger();
        WeblogEntryManager weblogMgr = roller.getWeblogEntryManager();

        WeblogEntry entry = null;
        Document doc = null;
        String handle = null;
        Timestamp now = new Timestamp(new Date().getTime());
        for (int i = offset; i < offset + limit; i++) {

            entry = null; // reset for each iteration

            doc = hits.doc(i);
            handle = doc.getField(FieldConstants.WEBSITE_HANDLE).stringValue();

            if (websiteSpecificSearch && handle.equals(feedRequest.getWeblogHandle())) {

                entry = weblogMgr.getWeblogEntry(doc.getField(FieldConstants.ID).stringValue());
            } else {

                entry = weblogMgr.getWeblogEntry(doc.getField(FieldConstants.ID).stringValue());

                if (doc.getField(FieldConstants.CATEGORY) != null) {
                    categories.add(doc.getField(FieldConstants.CATEGORY).stringValue());
                }
            }

            // maybe null if search result returned inactive user
            // or entry's user is not the requested user.
            // but don't return future posts
            if (entry != null && entry.getPubTime().before(now)) {
                results.add(WeblogEntryWrapper.wrap(entry, urlStrategy));
            }
        }

        if (categories.size() > 0) {
            this.categories = categories;
        }
    } catch (IOException e) {
        throw new WebloggerException(e);
    }
}

From source file:org.apache.roller.weblogger.ui.rendering.model.SearchResultsModel.java

private void convertHitsToEntries(Hits hits) throws WebloggerException {

    // determine offset
    this.offset = searchRequest.getPageNum() * RESULTS_PER_PAGE;
    if (this.offset >= hits.length()) {
        this.offset = 0;
    }

    // determine limit
    this.limit = RESULTS_PER_PAGE;
    if (this.offset + this.limit > hits.length()) {
        this.limit = hits.length() - this.offset;
    }

    try {
        TreeSet categories = new TreeSet();
        Weblogger roller = WebloggerFactory.getWeblogger();
        WeblogEntryManager weblogMgr = roller.getWeblogEntryManager();

        WeblogEntry entry = null;
        Document doc = null;
        String handle = null;
        Timestamp now = new Timestamp(new Date().getTime());
        for (int i = offset; i < offset + limit; i++) {

            entry = null; // reset for each iteration

            doc = hits.doc(i);
            handle = doc.getField(FieldConstants.WEBSITE_HANDLE).stringValue();

            if (websiteSpecificSearch && handle.equals(searchRequest.getWeblogHandle())) {

                entry = weblogMgr.getWeblogEntry(doc.getField(FieldConstants.ID).stringValue());
            } else {

                entry = weblogMgr.getWeblogEntry(doc.getField(FieldConstants.ID).stringValue());

                if (doc.getField(FieldConstants.CATEGORY) != null) {
                    categories.add(doc.getField(FieldConstants.CATEGORY).stringValue());
                }
            }

            // maybe null if search result returned inactive user
            // or entry's user is not the requested user.
            // but don't return future posts
            if (entry != null && entry.getPubTime().before(now)) {
                addEntryToResults(WeblogEntryWrapper.wrap(entry, urlStrategy));
            }
        }

        if (categories.size() > 0) {
            this.categories = categories;
        }
    } catch (IOException e) {
        throw new WebloggerException(e);
    }
}
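
Both Roller models above only publish the freshly built category set when it actually collected something, checking categories.size() > 0 after the loop. A stripped-down sketch of that guard:

TreeSet<String> categories = new TreeSet<>();
// ... one categories.add(...) per matching search hit ...
categories.add("Java");
categories.add("Search");

if (categories.size() > 0) {            // equivalently: !categories.isEmpty()
    System.out.println(categories);     // [Java, Search] -- sorted and de-duplicated
}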

From source file:com.joliciel.talismane.parser.ParseEvaluationSentenceWriter.java

@Override
public void onNextParseConfiguration(ParseConfiguration realConfiguration,
        List<ParseConfiguration> guessedConfigurations) {
    try {
        TreeSet<Integer> startIndexes = new TreeSet<Integer>();
        for (PosTaggedToken posTaggedToken : realConfiguration.getPosTagSequence()) {
            if (!posTaggedToken.getTag().equals(PosTag.ROOT_POS_TAG)) {
                Token token = posTaggedToken.getToken();
                startIndexes.add(token.getStartIndex());
            }
        }
        if (hasTokeniser || hasPosTagger) {
            int i = 0;
            for (ParseConfiguration guessedConfiguration : guessedConfigurations) {
                for (PosTaggedToken posTaggedToken : guessedConfiguration.getPosTagSequence()) {
                    if (!posTaggedToken.getTag().equals(PosTag.ROOT_POS_TAG)) {
                        Token token = posTaggedToken.getToken();
                        startIndexes.add(token.getStartIndex());
                    }
                }
                i++;
                if (i == guessCount)
                    break;
            }
        }
        Map<Integer, Integer> startIndexMap = new HashMap<Integer, Integer>();
        int j = 0;
        for (int startIndex : startIndexes) {
            startIndexMap.put(startIndex, j++);
        }

        PosTagSequence posTagSequence = realConfiguration.getPosTagSequence();
        PosTaggedToken[] realTokens = new PosTaggedToken[startIndexes.size()];
        for (PosTaggedToken posTaggedToken : posTagSequence) {
            if (!posTaggedToken.getTag().equals(PosTag.ROOT_POS_TAG)) {
                realTokens[startIndexMap.get(posTaggedToken.getToken().getStartIndex())] = posTaggedToken;
            }
        }

        for (PosTaggedToken posTaggedToken : realTokens) {
            if (posTaggedToken != null) {
                csvFileWriter.write(CSV.format(posTaggedToken.getToken().getOriginalText()));
            } else {
                csvFileWriter.write(CSV.getCsvSeparator());
            }
        }

        csvFileWriter.write("\n");
        for (PosTaggedToken posTaggedToken : realTokens) {
            if (posTaggedToken != null) {
                csvFileWriter.write(CSV.format(posTaggedToken.getTag().getCode()));
            } else {
                csvFileWriter.write(CSV.getCsvSeparator());
            }
        }
        csvFileWriter.write("\n");
        for (PosTaggedToken posTaggedToken : realTokens) {
            if (posTaggedToken != null) {
                DependencyArc realArc = realConfiguration.getGoverningDependency(posTaggedToken);
                String realLabel = realArc.getLabel() == null ? "null" : realArc.getLabel();
                csvFileWriter.write(CSV.format(realLabel));
            } else {
                csvFileWriter.write(CSV.getCsvSeparator());
            }
        }
        csvFileWriter.write("\n");
        for (PosTaggedToken posTaggedToken : realTokens) {
            if (posTaggedToken != null) {
                DependencyArc realArc = realConfiguration.getGoverningDependency(posTaggedToken);
                int startIndex = -1;
                if (realArc != null) {
                    PosTaggedToken head = realArc.getHead();
                    if (!head.getTag().equals(PosTag.ROOT_POS_TAG)) {
                        startIndex = head.getToken().getStartIndex();
                    }
                }
                if (startIndex < 0)
                    csvFileWriter.write(CSV.format("ROOT"));
                else
                    csvFileWriter
                            .write(CSV.getColumnLabel(startIndexMap.get(startIndex)) + CSV.getCsvSeparator());
            } else {
                csvFileWriter.write(CSV.getCsvSeparator());
            }
        }
        csvFileWriter.write("\n");

        for (int i = 0; i < guessCount; i++) {
            if (i < guessedConfigurations.size()) {
                ParseConfiguration guessedConfiguration = guessedConfigurations.get(i);
                PosTaggedToken[] guessedTokens = new PosTaggedToken[startIndexes.size()];
                for (PosTaggedToken posTaggedToken : guessedConfiguration.getPosTagSequence()) {
                    if (!posTaggedToken.getTag().equals(PosTag.ROOT_POS_TAG)) {
                        guessedTokens[startIndexMap
                                .get(posTaggedToken.getToken().getStartIndex())] = posTaggedToken;
                    }
                }

                if (hasTokeniser) {
                    for (PosTaggedToken posTaggedToken : guessedTokens) {
                        if (posTaggedToken != null) {
                            csvFileWriter.write(CSV.format(posTaggedToken.getToken().getOriginalText()));
                        } else {
                            csvFileWriter.write(CSV.getCsvSeparator());
                        }
                    }

                    csvFileWriter.write("\n");
                }

                if (hasPosTagger) {
                    for (PosTaggedToken posTaggedToken : guessedTokens) {
                        if (posTaggedToken != null) {
                            csvFileWriter.write(CSV.format(posTaggedToken.getTag().getCode()));
                        } else {
                            csvFileWriter.write(CSV.getCsvSeparator());
                        }
                    }
                    csvFileWriter.write("\n");
                }

                for (PosTaggedToken posTaggedToken : guessedTokens) {
                    if (posTaggedToken != null) {
                        DependencyArc guessedArc = guessedConfiguration.getGoverningDependency(posTaggedToken);
                        String guessedLabel = "";
                        if (guessedArc != null) {
                            guessedLabel = guessedArc.getLabel() == null ? "null" : guessedArc.getLabel();
                        }
                        csvFileWriter.write(CSV.format(guessedLabel));
                    } else {
                        csvFileWriter.write(CSV.getCsvSeparator());
                    }
                }
                csvFileWriter.write("\n");
                for (PosTaggedToken posTaggedToken : guessedTokens) {
                    if (posTaggedToken != null) {
                        DependencyArc guessedArc = guessedConfiguration.getGoverningDependency(posTaggedToken);
                        int startIndex = -1;
                        if (guessedArc != null) {
                            PosTaggedToken head = guessedArc.getHead();
                            if (!head.getTag().equals(PosTag.ROOT_POS_TAG)) {
                                startIndex = head.getToken().getStartIndex();
                            }
                        }
                        if (startIndex < 0)
                            csvFileWriter.write(CSV.format("ROOT"));
                        else
                            csvFileWriter.write(
                                    CSV.getColumnLabel(startIndexMap.get(startIndex)) + CSV.getCsvSeparator());
                    } else {
                        csvFileWriter.write(CSV.getCsvSeparator());
                    }
                }
                csvFileWriter.write("\n");
                for (PosTaggedToken posTaggedToken : guessedTokens) {
                    if (posTaggedToken != null) {
                        DependencyArc guessedArc = guessedConfiguration.getGoverningDependency(posTaggedToken);
                        double prob = 1.0;
                        if (guessedArc != null) {
                            Transition transition = guessedConfiguration.getTransition(guessedArc);
                            if (transition != null)
                                prob = transition.getDecision().getProbability();
                        }
                        csvFileWriter.write(CSV.format(prob));
                    } else {
                        csvFileWriter.write(CSV.getCsvSeparator());
                    }
                }
                csvFileWriter.write("\n");

            } else {
                csvFileWriter.write("\n");
                csvFileWriter.write("\n");
            } // have more configurations
        } // next guessed configuration
        csvFileWriter.flush();
    } catch (IOException ioe) {
        LogUtils.logError(LOG, ioe);
        throw new RuntimeException(ioe);
    }
}
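
The writer above first collects every token start index into a TreeSet so the indexes come out sorted and de-duplicated, then uses startIndexes.size() to allocate one column per distinct index and builds an index-to-column map. A reduced sketch of that mapping step:

TreeSet<Integer> startIndexes = new TreeSet<>(java.util.Arrays.asList(12, 0, 5, 12));

java.util.Map<Integer, Integer> startIndexMap = new java.util.HashMap<>();
int j = 0;
for (int startIndex : startIndexes) {                  // ascending order: 0, 5, 12
    startIndexMap.put(startIndex, j++);
}

String[] columns = new String[startIndexes.size()];    // one slot per distinct start index
// startIndexMap -> {0=0, 5=1, 12=2}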

From source file:sbml.test.UploadUnzipTest.java

private TreeSet<UserTestCase> unzipUserArchive(ServletFileUpload reqHandler)
        throws ServletException, IOException {
    // parseRequest() returns a list of items, but our particular
    // httpRequest will only have one: the zip file uploaded by the user.
    // If we don't get that, something went wrong.

    List items;
    try {
        items = reqHandler.parseRequest(httpRequest);
    } catch (FileUploadException e) {
        propagateError(BAD_UPLOAD, e);
        return null;
    }

    // Some sanity checking.  The case of > 1 shouldn't happen because
    // we're in control of this part (via uploadresults.jsp), but let's
    // check it in case someone edits things in the future and
    // inadvertently breaks this part.

    if (items.isEmpty()) {
        propagateError(BAD_UPLOAD, "No file uploaded.");
        return null;
    } else if (items.size() > 1) {
        propagateError(BAD_UPLOAD, "More than one file uploaded.");
        return null;
    }

    // Unzip the file and write out the individual file contents to
    // disk.  This will create a bunch of files that are expected to be
    // the user's CSV results files.  We create objects representing
    // each of these user results and put them in a list.  We ignore
    // any files that don't have the expected name pattern.

    FileItem zipFile = (FileItem) items.get(0);
    TreeSet<UserTestCase> cases = new TreeSet<UserTestCase>();
    ArrayList<String> badFileNames = new ArrayList<String>();
    try {
        ZipInputStream zis = new ZipInputStream(zipFile.getInputStream());
        ZipEntry entry;
        UserTestCase theCase;

        while ((entry = zis.getNextEntry()) != null) {
            String fileName = entry.getName();
            String caseNumStr = caseNameFromFileName(fileName);
            if (caseNumStr == null) {
                badFileNames.add(fileName);
                continue;
            }

            File path = UserTestCase.pathToDataFile(uploadDir, caseNumStr);
            FileOutputStream fs = new FileOutputStream(path);
            BufferedOutputStream bos = new BufferedOutputStream(fs, 2048);

            int count;
            byte data[] = new byte[2048];

            while ((count = zis.read(data, 0, 2048)) != -1)
                bos.write(data, 0, count);

            bos.flush();
            bos.close();

            theCase = new UserTestCase(refCasesDir, uploadDir, caseNumStr);
            cases.add(theCase);
        }
        zis.close();
    } catch (Exception e) {
        propagateError(SERVER_ERROR, e);
        return null;
    }

    if (cases.size() >= 1) {
        OnlineSTS.logInfo(httpRequest, "Got " + cases.size() + " case" + (cases.size() > 1 ? "s" : ""));
        return cases;
    } else {
        if (badFileNames.size() >= 1)
            propagateError(BAD_FILE_NAMES, badFileNames.get(0));
        else
            propagateError(EMPTY_ARCHIVE, zipFile.getName());

        return null;
    }
}
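
The upload handler above branches on cases.size() to distinguish an empty archive from a populated one, and reuses the count to pluralize its log message. A minimal sketch of that check:

TreeSet<String> cases = new TreeSet<>(java.util.Arrays.asList("00001", "00002"));

if (cases.size() >= 1) {
    System.out.println("Got " + cases.size() + " case" + (cases.size() > 1 ? "s" : ""));  // Got 2 cases
} else {
    System.out.println("Empty archive");
}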

From source file:org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java

private AvailabilityDataRLE getLastAvail(DataPoint state,
        Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails) {
    Integer mId = state.getMeasurementId();
    TreeSet<AvailabilityDataRLE> rles = currAvails.get(mId);
    if (rles.size() == 0) {
        return null;
    }
    return rles.last();
}
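
Here rles.size() == 0 is the emptiness guard before asking the sorted set for its last (most recent) entry; isEmpty() expresses the same thing. A reduced sketch, with the availability RLE type swapped for a plain Long timestamp purely for illustration:

TreeSet<Long> rles = new TreeSet<>(java.util.Arrays.asList(1000L, 2000L, 3000L));

Long lastAvail = rles.size() == 0 ? null : rles.last();   // same guard as above -> 3000
Long alsoLast = rles.isEmpty() ? null : rles.last();      // equivalent, arguably clearer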

From source file:org.geoserver.security.iride.IrideRoleService.java

@Override
public SortedSet<GeoServerRole> getRolesForUser(final String username) throws IOException {
    LOGGER.trace("User: {}", username);

    final TreeSet<GeoServerRole> roles = new TreeSet<>();

    final IrideIdentity irideIdentity = IrideIdentity.parseIrideIdentity(username);
    if (irideIdentity != null) {
        final IrideRole[] irideRoles = this.getIrideService().findRuoliForPersonaInApplication(irideIdentity,
                new IrideApplication(this.config.applicationName));
        for (final IrideRole irideRole : irideRoles) {
            roles.add(this.createRoleObject(irideRole.toMnemonicRepresentation()));
        }
    }

    // Rely on the fallback RoleService (if configured) when no IRIDE roles are available for the given user
    if (roles.isEmpty() && this.config.hasFallbackRoleServiceName()) {
        LOGGER.info("No IRIDE roles available for the given user {}: falling back to RoleService '{}'",
                username, this.config.fallbackRoleServiceName);

        final GeoServerRoleService fallbackRoleService = this.getSecurityManager()
                .loadRoleService(this.config.fallbackRoleServiceName);
        if (fallbackRoleService != null) {
            roles.addAll(fallbackRoleService.getRolesForUser(username));
        } else {
            LOGGER.warn("A fallback RoleService '{}' was specified, but none was found!",
                    this.config.fallbackRoleServiceName);
        }
    }

    LOGGER.trace("Added {} roles for User {}", roles.size(), username);

    return ImmutableSortedSet.copyOf(roles);
}