Example usage for java.util TreeMap keySet

Introduction

On this page you can find example usages of java.util.TreeMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map. The set's iterator returns the keys in ascending order, and the set is backed by the map, so changes to the map are reflected in the set, and vice versa.
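
A minimal, self-contained sketch of this contract (the class name KeySetDemo and all values here are hypothetical, not taken from the usage examples below):

import java.util.Set;
import java.util.TreeMap;

public class KeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<>();
        map.put("banana", 2);
        map.put("apple", 1);
        map.put("cherry", 3);

        // TreeMap's key view iterates in ascending key order
        Set<String> keys = map.keySet();
        for (String key : keys) {
            System.out.println(key + " -> " + map.get(key)); // apple, banana, cherry
        }

        // The view is live: removing from the set also removes from the map
        keys.remove("banana");
        System.out.println(map.containsKey("banana")); // false
    }
}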

Usage

From source file:net.spfbl.core.Analise.java

protected static void dumpClusterCIDR(StringBuilder builder) {
    TreeMap<String, Short[]> map = getClusterMap();
    for (String token : map.keySet()) {
        Short[] dist = map.get(token);
        int spam = dist[1];
        if (spam > 512) {
            int ham = dist[0];
            float total = ham + spam;
            float reputation = spam / total;
            if (reputation > CLUSTER_RED) {
                if (Subnet.isValidCIDR(token)) {
                    if (!Block.contains(token)) {
                        builder.append(token);
                        builder.append(' ');
                        builder.append(ham);
                        builder.append(' ');
                        builder.append(spam);
                        builder.append('\n');
                    }
                }
            }
        }
    }
}

From source file:net.spfbl.core.Analise.java

protected static void dumpClusterMask(StringBuilder builder) {
    TreeMap<String, Short[]> map = getClusterMap();
    for (String token : map.keySet()) {
        if (token.contains("#") || token.contains(".H.")) {
            Short[] dist = map.get(token);
            int spam = dist[1];
            if (spam > 512) {
                int ham = dist[0];
                float total = ham + spam;
                float reputation = spam / total;
                if (reputation > CLUSTER_RED) {
                    if (!Generic.containsGenericExact(token)) {
                        String hostname = token.replace("#", "0");
                        hostname = hostname.replace(".H.", ".0a.");
                        if (!Block.contains(hostname)) {
                            builder.append(token);
                            builder.append(' ');
                            builder.append(ham);
                            builder.append(' ');
                            builder.append(spam);
                            builder.append('\n');
                        }
                    }
                }
            }
        }
    }
}
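
Both dump methods above pair keySet() iteration with a map.get(token) lookup per key; on a TreeMap each get() is an additional O(log n) search. Purely as a hedged variant (the keySet() form is what this page documents), the same traversal can read key and value together via entrySet(). The class and method names below are hypothetical:

import java.util.Map;
import java.util.TreeMap;

public class EntrySetVariant {
    // Sketch only: hypothetical ham/spam columns as in the dumps above
    static void dump(TreeMap<String, Short[]> map, StringBuilder builder) {
        for (Map.Entry<String, Short[]> entry : map.entrySet()) {
            Short[] dist = entry.getValue();
            builder.append(entry.getKey()).append(' ')
                    .append(dist[0]).append(' ')
                    .append(dist[1]).append('\n');
        }
    }

    public static void main(String[] args) {
        TreeMap<String, Short[]> map = new TreeMap<>();
        map.put("10.0.0.0/8", new Short[] { 5, 600 });
        StringBuilder sb = new StringBuilder();
        dump(map, sb);
        System.out.print(sb); // 10.0.0.0/8 5 600
    }
}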

From source file:edu.umm.radonc.ca_dash.controllers.PieChartController.java

public void updateChart(String dataSet) {
    TreeMap<String, Long> mtxcounts;
    TreeMap<String, Long> dptcounts;
    TreeMap<String, SynchronizedDescriptiveStatistics> mptstats;
    TreeMap<String, SynchronizedDescriptiveStatistics> ptstats;
    pieChart.clear();
    dstats.clear();
    dstatsPerDoc.clear();
    dstatsPerRTM.clear();

    JSONArray labels = new JSONArray();

    if (dataSet.equals("DR")) {
        dptcounts = getFacade().doctorPtCounts(startDate, endDate, selectedFacility, selectedFilters);
        ptstats = getFacade().doctorStats(startDate, endDate, selectedFacility, selectedFilters);
        for (String doctor : dptcounts.keySet()) {
            Long count = dptcounts.get(doctor);
            DoctorStats newItem = new DoctorStats();
            newItem.setTotalPatients(count);
            newItem.setAverageDailyPatients(ptstats.get(doctor));
            dstatsPerDoc.put(doctor, newItem);
            pieChart.set(doctor, newItem.getAverageDailyPatients().getMean());
            dstats.addValue(count);
            try {
                String item = doctor + "<br/>( mean: " + Math.round(newItem.getAverageDailyPatients().getMean())
                        + ", &#963;: " + decf.format(newItem.getAverageDailyPatients().getStandardDeviation())
                        + " )";
                labels.put(item);
            } catch (Exception e) {
                //FIXME
            }
        }

        pieChart.setTitle("Physician Workload: " + df.format(startDate) + " - " + df.format(endDate));
    } else {
        mtxcounts = getFacade().machineTxCounts(startDate, endDate, selectedFacility, selectedFilters);
        mptstats = getFacade().machineStats(startDate, endDate, selectedFacility, selectedFilters);
        pieChart.setTitle("Tx per Machine: " + df.format(startDate) + " - " + df.format(endDate));

        for (String machine : mtxcounts.keySet()) {
            Long count = mtxcounts.get(machine);
            DoctorStats newItem = new DoctorStats();
            newItem.setTotalPatients(count);
            newItem.setAverageDailyPatients(mptstats.get(machine));
            dstatsPerRTM.put(machine, newItem);
            pieChart.set(machine, newItem.getAverageDailyPatients().getMean());
            dstats.addValue(count);
            try {
                String item = machine + "<br/>( mean: "
                        + Math.round(newItem.getAverageDailyPatients().getMean()) + ", &#963;: "
                        + decf.format(newItem.getAverageDailyPatients().getStandardDeviation()) + " )";
                labels.put(item);
            } catch (Exception e) {
                //FIXME
            }
        }
    }

    //pieChart.setLegendPosition("ne");
    pieChart.setShowDataLabels(true);
    pieChart.setShadow(false);
    //pieChart.setDataFormat("value");
    pieChart.setSeriesColors("8C3130, E0AB5D, 4984D0, 2C2A29, A2B85C, BBBEC3, D8C9B6, BD8A79, 3C857A, CD3935");
    pieChart.setExtender("function(){ this.cfg.seriesDefaults.rendererOptions.dataLabels = " + labels.toString()
            + "; " + "this.cfg.seriesDefaults.rendererOptions.dataLabelPositionFactor = 1.21; "
            + "this.cfg.seriesDefaults.rendererOptions.diameter = 600; "
            + "this.cfg.seriesDefaults.rendererOptions.dataLabelThreshold = 0.5;" + "this.cfg.sliceMargin = 3; "
            + "this.legend = {show:false} }");
}

From source file:com.sfs.whichdoctor.webservice.RotationXmlOutputImpl.java

/**
 * Gets the tool count XML.
 *
 * @param type the type
 * @param toolCounts the tool counts
 * @return the tool count xml
 */
private Element getToolCountXml(final String type, final TreeMap<String, ToolCount> toolCounts) {

    Element tctXml = new Element("ToolCountType");
    tctXml.setAttribute("name", type);

    for (String id : toolCounts.keySet()) {
        ToolCount tc = toolCounts.get(id);

        Element tcXml = new Element("ToolCount");
        tcXml.setAttribute("name", tc.getName());
        tcXml.setAttribute("subName", tc.getSubName());
        tcXml.setAttribute("STP", tc.getTrainingProgramShortName());
        tcXml.addContent(String.valueOf(tc.getCount()));

        tctXml.addContent(tcXml);
    }
    return tctXml;
}

From source file:org.intermine.bio.web.displayer.MouseAllelesDisplayer.java

@SuppressWarnings({ "unchecked", "unused" })
@Override
public void display(HttpServletRequest request, ReportObject reportObject) {
    HttpSession session = request.getSession();
    im = SessionMethods.getInterMineAPI(session);
    Model model = im.getModel();
    PathQueryExecutor executor = im.getPathQueryExecutor(SessionMethods.getProfile(session));

    // Counts of HLPT Names
    PathQuery q = new PathQuery(model);

    Integer alleleCount = 0;
    Boolean mouser = false;
    if (!MouseAllelesDisplayer.isThisAMouser(reportObject)) {
        // to give us some homologue identifier and the actual terms to tag-cloudize
        q.addViews("Gene.symbol", "Gene.primaryIdentifier", "Gene.id",
                "Gene.homologues.homologue.alleles.genotypes.phenotypeTerms.name");
        // add this rubbish so we do not filter out the same terms
        q.addViews("Gene.homologues.homologue.id", "Gene.homologues.homologue.alleles.id",
                "Gene.homologues.homologue.alleles.genotypes.id");

        // mouse homologues only
        q.addConstraint(Constraints.eq("Gene.homologues.homologue.organism.shortName", "M. musculus"), "A");
        // for our gene object
        q.addConstraint(Constraints.eq("Gene.id", reportObject.getObject().getId().toString()), "B");
        // we want only those homologues that have a non-empty alleles collection
        q.addConstraint(Constraints.isNotNull("Gene.homologues.homologue.alleles.id"));
        q.setConstraintLogic("A and B");
        // order by the homologue db id, just to keep the alleles in a reasonable order
        q.addOrderBy("Gene.homologues.homologue.id", OrderDirection.ASC);

        // allele count
        PathQuery cq = new PathQuery(im.getModel());
        cq.addViews("Gene.homologues.homologue.alleles.primaryIdentifier");
        cq.addConstraint(Constraints.eq("Gene.homologues.homologue.organism.shortName", "M. musculus"), "A");
        cq.addConstraint(Constraints.eq("Gene.id", reportObject.getObject().getId().toString()), "B");
        cq.setConstraintLogic("A and B");
        try {
            alleleCount = executor.count(cq);
        } catch (ObjectStoreException e) {
            // couldn't get count
        }
    } else {
        mouser = true;

        // to give us some homologue identifier and the actual terms to tag-cloudize
        q.addViews("Gene.symbol", "Gene.primaryIdentifier", "Gene.id",
                "Gene.alleles.genotypes.phenotypeTerms.name");
        // add this rubbish so we do not filter out the same terms
        q.addViews("Gene.alleles.id", "Gene.alleles.genotypes.id");

        // for our gene object
        q.addConstraint(Constraints.eq("Gene.id", reportObject.getObject().getId().toString()), "A");
        // we want only those homologues that have a non-empty alleles collection
        q.addConstraint(Constraints.isNotNull("Gene.alleles.id"));

        // mouser has a collection table of alleles as well
        for (FieldDescriptor fd : reportObject.getClassDescriptor().getAllFieldDescriptors()) {
            if ("alleles".equals(fd.getName()) && fd.isCollection()) {
                // fetch the collection
                Collection<?> collection = null;
                try {
                    collection = (Collection<?>) reportObject.getObject().getFieldValue("alleles");
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                }

                List<Class<?>> lc = PathQueryResultHelper.queryForTypesInCollection(reportObject.getObject(),
                        "alleles", im.getObjectStore());

                if (collection == null) {
                    return;
                }

                // create an InlineResultsTable
                InlineResultsTable t = new InlineResultsTable(collection, fd.getClassDescriptor().getModel(),
                        SessionMethods.getWebConfig(request), im.getClassKeys(), collection.size(), false, lc);

                request.setAttribute("collection", t);

                // Get the number of alleles.
                alleleCount = collection.size();

                break;
            }
        }
    }

    ExportResultsIterator qResults;
    try {
        qResults = executor.execute(q);
    } catch (ObjectStoreException e) {
        throw new RuntimeException(e);
    }
    // traverse so we get a nice map from homologue symbol to a map of allele term names (and
    //  some extras)
    HashMap<String, HashMap<String, Object>> counts = new HashMap<String, HashMap<String, Object>>();
    while (qResults.hasNext()) {
        List<ResultElement> row = qResults.next();
        String sourceGeneSymbol = getIdentifier(row);
        // a per source gene map
        HashMap<String, Integer> terms;
        if (!counts.containsKey(sourceGeneSymbol)) {
            HashMap<String, Object> wrapper = new HashMap<String, Object>();
            terms = new LinkedHashMap<String, Integer>();
            wrapper.put("terms", terms);
            wrapper.put("homologueId",
                    (mouser) ? row.get(2).getField().toString() : row.get(4).getField().toString());
            wrapper.put("isMouser", mouser);
            counts.put(sourceGeneSymbol, wrapper);
        } else {
            terms = (HashMap<String, Integer>) counts.get(sourceGeneSymbol).get("terms");
        }
        // populate the allele term with count
        String alleleTerm = row.get(3).getField().toString();
        if (!alleleTerm.isEmpty()) {
            if (!terms.containsKey(alleleTerm)) {
                terms.put(alleleTerm, 1);
            } else {
                terms.put(alleleTerm, terms.get(alleleTerm) + 1);
            }
        }
    }

    // Now give us a map of top 20 per homologue
    HashMap<String, HashMap<String, Object>> top = new HashMap<String, HashMap<String, Object>>();
    for (String symbol : counts.keySet()) {
        HashMap<String, Object> gene = counts.get(symbol);
        LinkedHashMap<String, Integer> terms = (LinkedHashMap<String, Integer>) gene.get("terms");
        if (terms != null) {
            // sorted by value
            TreeMap<String, Integer> sorted = new TreeMap<String, Integer>(new IntegerValueComparator(terms));
            // deep copy
            for (String term : terms.keySet()) {
                sorted.put(term, terms.get(term));
            }
            // "mark" top 20 and order by natural order - the keys
            TreeMap<String, Map<String, Object>> marked = new TreeMap<String, Map<String, Object>>();
            Integer i = 0;
            for (String term : sorted.keySet()) {
                // wrapper map
                HashMap<String, Object> m = new HashMap<String, Object>();
                // am I top dog?
                Boolean topTerm = false;
                if (i < 20) {
                    topTerm = true;
                }
                m.put("top", topTerm);
                m.put("count", sorted.get(term));
                m.put("url", getUrl((String) gene.get("homologueId"), term));

                // save it
                marked.put(term, m);
                i++;
            }

            HashMap<String, Object> wrapper = new HashMap<String, Object>();
            wrapper.put("terms", marked);
            wrapper.put("homologueId", gene.get("homologueId"));
            wrapper.put("isMouser", gene.get("isMouser"));
            top.put(symbol, wrapper);
        }
    }

    request.setAttribute("thisIsAMouser", mouser);

    request.setAttribute("counts", top);

    request.setAttribute("alleleCount", alleleCount);
}

From source file:se.sics.gvod.common.GraphUtil.java

public GraphUtil(TreeMap<VodAddress, VodNeighbors> alivePeers) {
    super();
    n = alivePeers.size();
    m = new byte[n][n];
    dist = new int[n][n];
    inDegree = new double[n];
    outDegree = new int[n];
    clustering = new double[n];
    a = new VodAddress[n];
    map = new HashMap<VodAddress, Integer>();
    neighbors = new int[n][];
    inStats = new SummaryStatistics();
    outStats = new SummaryStatistics();

    // map all alive nodes to a contiguous sequence of integers
    {
        int p = 0;
        for (VodAddress address : alivePeers.keySet()) {
            VodAddress src = (VodAddress) address;
            utilitySetNbChange += (alivePeers.get(src).getUtilitySetNbChange()
                    / alivePeers.get(src).getNbCycles());
            upperSetNbChange += (alivePeers.get(src).getUpperSetNbChange() / alivePeers.get(src).getNbCycles());
            nbCycles += alivePeers.get(src).getNbCycles();
            a[p] = src;
            map.put(src, p);
            p++;
        }
    }

    // build adjacency matrix
    int d = -1;
    {
        try {
            for (int s = 0; s < a.length; s++) {
                VodAddress src = a[s];
                VodNeighbors neigh = alivePeers.get(src);
                int nn = 0;
                for (VodDescriptor desc : neigh.getRandomSetDescriptors()) {
                    VodAddress dst = desc.getVodAddress();
                    if (!map.containsKey(dst)) {
                        continue;
                    }
                    d = map.get(dst);
                    m[s][d] = 1;
                    inDegree[d]++;
                    outDegree[s]++;
                    nn++;
                }
                neighbors[s] = new int[nn];
                nn = 0;
                for (VodDescriptor desc : neigh.getRandomSetDescriptors()) {
                    VodAddress dst = desc.getVodAddress();
                    if (map.containsKey(dst)) {
                        neighbors[s][nn++] = map.get(dst);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    // build distance matrix, clustering coefficient, average path length
    // diameter and average degrees
    {
        for (int i = 0; i < n; i++) {
            bfs(i, dist[i]);

            // we compute the clustering coefficient here
            int neigh[] = neighbors[i];
            if (neigh.length <= 1) {
                clustering[i] = 1.0;
                continue;
            }
            int edges = 0;

            for (int j = 0; j < neigh.length; j++) {
                for (int k = j + 1; k < neigh.length; k++) {
                    if (m[neigh[j]][neigh[k]] > 0 || m[neigh[k]][neigh[j]] > 0) {
                        ++edges;
                    }
                }
            }
            clustering[i] = ((edges * 2.0) / neigh.length) / (neigh.length - 1);
        }
        int k = 0;
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                if (i == j)
                    continue;
                if (dist[i][j] == n) {
                    infinitePathCount++;
                    continue;
                }
                if (dist[i][j] > diameter) {
                    diameter = dist[i][j];
                }
                avgPathLength = (avgPathLength * k + dist[i][j]) / (k + 1);
                k++;
            }
            inStats.addValue(inDegree[i]);
            outStats.addValue(outDegree[i]);
            // avgIn = (avgIn * i + inDegree[i]) / (i + 1);
            // minIn = minIn > inDegree[i] ? inDegree[i] : minIn;
            // maxIn = maxIn < inDegree[i] ? inDegree[i] : maxIn;
            // avgOut = (avgOut * i + outDegree[i]) / (i + 1);
            avgClustering = (avgClustering * i + clustering[i]) / (i + 1);
        }
    }
}

From source file:se.sics.kompics.p2p.overlay.cyclon.GraphUtil.java

public GraphUtil(TreeMap<OverlayAddress, CyclonNeighbors> alivePeers) {
    super();
    n = alivePeers.size();
    m = new byte[n][n];
    dist = new int[n][n];
    inDegree = new double[n];
    outDegree = new int[n];
    clustering = new double[n];
    a = new CyclonAddress[n];
    map = new HashMap<CyclonAddress, Integer>();
    neighbors = new int[n][];
    inStats = new SummaryStatistics();
    outStats = new SummaryStatistics();

    // map all alive nodes to a contiguous sequence of integers
    {
        int p = 0;
        for (OverlayAddress address : alivePeers.keySet()) {
            CyclonAddress src = (CyclonAddress) address;
            a[p] = src;
            map.put(src, p);
            p++;
        }
    }

    // build adjacency matrix
    int d = -1;
    {
        try {
            for (int s = 0; s < a.length; s++) {
                CyclonAddress src = a[s];
                CyclonNeighbors neigh = alivePeers.get(src);
                int nn = 0;
                for (CyclonNodeDescriptor desc : neigh.getDescriptors()) {
                    CyclonAddress dst = desc.getCyclonAddress();
                    if (!map.containsKey(dst)) {
                        continue;
                    }
                    d = map.get(dst);
                    m[s][d] = 1;
                    inDegree[d]++;
                    outDegree[s]++;
                    nn++;
                }
                neighbors[s] = new int[nn];
                nn = 0;
                for (CyclonNodeDescriptor desc : neigh.getDescriptors()) {
                    CyclonAddress dst = desc.getCyclonAddress();
                    if (map.containsKey(dst)) {
                        neighbors[s][nn++] = map.get(dst);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    // build distance matrix, clustering coefficient, average path length
    // diameter and average degrees
    {
        for (int i = 0; i < n; i++) {
            bfs(i, dist[i]);

            // we compute the clustering coefficient here
            int neigh[] = neighbors[i];
            if (neigh.length <= 1) {
                clustering[i] = 1.0;
                continue;
            }
            int edges = 0;

            for (int j = 0; j < neigh.length; j++) {
                for (int k = j + 1; k < neigh.length; k++) {
                    if (m[neigh[j]][neigh[k]] > 0 || m[neigh[k]][neigh[j]] > 0) {
                        ++edges;
                    }
                }
            }
            clustering[i] = ((edges * 2.0) / neigh.length) / (neigh.length - 1);
        }
        int k = 0;
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                if (i == j)
                    continue;
                if (dist[i][j] == n) {
                    infinitePathCount++;
                    continue;
                }
                if (dist[i][j] > diameter) {
                    diameter = dist[i][j];
                }
                avgPathLength = (avgPathLength * k + dist[i][j]) / (k + 1);
                k++;
            }
            inStats.addValue(inDegree[i]);
            outStats.addValue(outDegree[i]);
            // avgIn = (avgIn * i + inDegree[i]) / (i + 1);
            // minIn = minIn > inDegree[i] ? inDegree[i] : minIn;
            // maxIn = maxIn < inDegree[i] ? inDegree[i] : maxIn;
            // avgOut = (avgOut * i + outDegree[i]) / (i + 1);
            avgClustering = (avgClustering * i + clustering[i]) / (i + 1);
        }
    }
}

From source file:org.egov.ptis.web.controller.rest.AssessmentServiceController.java

/**
 * Returns the list of floor numbers.
 * @return responseJson - server response in JSON format
 * @throws IOException
 */
@RequestMapping(value = "/property/floors", produces = APPLICATION_JSON_VALUE)
public String getFloors() throws IOException {
    List<MasterCodeNamePairDetails> mstrCodeNamePairDetailsList = new ArrayList<>();
    ErrorDetails errorDetails = null;
    String responseJson = null;
    //Boolean isAuthenticatedUser = propertyExternalService.authenticateUser(username, password);
    Boolean isAuthenticatedUser = true;
    if (isAuthenticatedUser) {
        TreeMap<Integer, String> floorMap = PropertyTaxConstants.FLOOR_MAP;

        Set<Integer> keys = floorMap.keySet();
        for (Integer key : keys) {
            MasterCodeNamePairDetails mstrCodeNamePairDetails = new MasterCodeNamePairDetails();
            mstrCodeNamePairDetails.setCode(key.toString());
            mstrCodeNamePairDetails.setName(floorMap.get(key));
            mstrCodeNamePairDetailsList.add(mstrCodeNamePairDetails);
        }
        responseJson = getJSONResponse(mstrCodeNamePairDetailsList);
    } else {
        errorDetails = getInvalidCredentialsErrorDetails();
        responseJson = getJSONResponse(errorDetails);
    }
    return responseJson;
}

From source file:eu.edisonproject.training.wsd.DisambiguatorImpl.java

private Term mapreduceDisambiguate(String term, Set<Term> possibleTerms, Set<String> ngarms,
        double minimumSimilarity) throws IOException {
    String filePath = ".." + File.separator + "etc" + File.separator + "Avro Document" + File.separator + term
            + File.separator + term + ".avro";
    TermAvroSerializer ts = new TermAvroSerializer(filePath, Term.getClassSchema());
    List<CharSequence> empty = new ArrayList<>();
    empty.add("");
    for (Term t : possibleTerms) {
        List<CharSequence> nuid = t.getNuids();
        if (nuid == null || nuid.isEmpty() || nuid.contains(null)) {
            t.setNuids(empty);
        }

        List<CharSequence> buids = t.getBuids();
        if (buids == null || buids.isEmpty() || buids.contains(null)) {
            t.setBuids(empty);
        }
        List<CharSequence> alt = t.getAltLables();
        if (alt == null || alt.isEmpty() || alt.contains(null)) {
            t.setAltLables(empty);
        }
        List<CharSequence> gl = t.getGlosses();
        if (gl == null || gl.isEmpty() || gl.contains(null)) {
            t.setGlosses(empty);
        } else {
            StringBuilder glosses = new StringBuilder();
            for (CharSequence n : gl) {
                glosses.append(n).append(" ");
            }
            gl = new ArrayList<>();
            stemer.setDescription(glosses.toString());
            gl.add(stemer.execute());
            t.setGlosses(gl);

        }
        List<CharSequence> cat = t.getCategories();
        if (cat == null || cat.contains(null)) {
            t.setCategories(empty);
        }
        ts.serialize(t);
    }
    Term context = new Term();
    context.setUid("context");
    StringBuilder glosses = new StringBuilder();
    context.setLemma(term);
    context.setOriginalTerm(term);
    context.setUrl("empty");
    for (String n : ngarms) {
        glosses.append(n).append(" ");
    }
    List<CharSequence> contextGlosses = new ArrayList<>();
    stemer.setDescription(glosses.toString());

    contextGlosses.add(stemer.execute());
    context.setGlosses(contextGlosses);
    List<CharSequence> nuid = context.getNuids();
    if (nuid == null || nuid.isEmpty() || nuid.contains(null)) {
        context.setNuids(empty);
    }

    List<CharSequence> buids = context.getBuids();
    if (buids == null || buids.isEmpty() || buids.contains(null)) {
        context.setBuids(empty);
    }
    List<CharSequence> alt = context.getAltLables();
    if (alt == null || alt.isEmpty() || alt.contains(null)) {
        context.setAltLables(empty);
    }
    List<CharSequence> gl = context.getGlosses();
    if (gl == null || gl.isEmpty() || gl.contains(null)) {
        context.setGlosses(empty);
    }
    List<CharSequence> cat = context.getCategories();
    if (cat == null || cat.contains(null)) {
        context.setCategories(empty);
    }
    ts.serialize(context);
    ts.close();

    ITFIDFDriver tfidfDriver = new TFIDFDriverImpl(term);
    tfidfDriver.executeTFIDF(new File(filePath).getParent());

    Map<CharSequence, Map<String, Double>> featureVectors = CSVFileReader
            .tfidfResult2Map(TFIDFDriverImpl.OUTPUT_PATH4 + File.separator + "part-r-00000");
    Map<String, Double> contextVector = featureVectors.remove("context");

    Map<CharSequence, Double> scoreMap = new HashMap<>();
    for (CharSequence key : featureVectors.keySet()) {
        Double similarity = cosineSimilarity(contextVector, featureVectors.get(key));
        scoreMap.put(key, similarity);
    }
    if (scoreMap.isEmpty()) {
        return null;
    }

    ValueComparator bvc = new ValueComparator(scoreMap);
    TreeMap<CharSequence, Double> sorted_map = new TreeMap<>(bvc);
    sorted_map.putAll(scoreMap);

    Iterator<CharSequence> it = sorted_map.keySet().iterator();
    CharSequence winner = it.next();

    Double s1 = scoreMap.get(winner);
    if (s1 < getMinimumSimilarity()) {
        return null;
    }

    return getTermFromDB(winner);

}
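
The example above sorts a score map by value by handing a value-based comparator to a TreeMap, then reads the winner from sorted_map.keySet(). Note that it looks the score up in the original scoreMap rather than in the sorted TreeMap: a comparator over another map's values is inconsistent with equals, so get() on the sorted map is unreliable. A hedged sketch of the same pattern, where ValueComparator is a hypothetical stand-in (the original class is not shown on this page):

import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class SortByValueSketch {
    // Hypothetical stand-in: orders keys by descending score, never
    // returning 0 so that distinct keys with equal scores are kept
    static class ValueComparator implements Comparator<CharSequence> {
        private final Map<CharSequence, Double> base;
        ValueComparator(Map<CharSequence, Double> base) { this.base = base; }
        @Override
        public int compare(CharSequence a, CharSequence b) {
            return base.get(a) >= base.get(b) ? -1 : 1;
        }
    }

    public static void main(String[] args) {
        Map<CharSequence, Double> scores = new HashMap<>();
        scores.put("termA", 0.42);
        scores.put("termB", 0.87);

        TreeMap<CharSequence, Double> sorted = new TreeMap<>(new ValueComparator(scores));
        sorted.putAll(scores);

        CharSequence winner = sorted.keySet().iterator().next(); // highest score first
        // Look the score up in the original map: the TreeMap's comparator
        // is inconsistent with equals, so sorted.get() may return null
        System.out.println(winner + " -> " + scores.get(winner)); // termB -> 0.87
    }
}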

From source file:com.eucalyptus.tests.awssdk.S3ListMpuTests.java

@Test
public void maxKeys() throws Exception {
    testInfo(this.getClass().getSimpleName() + " - maxKeys");
    try {
        int numKeys = 3 + random.nextInt(3); // 3-5 keys
        int numUploads = 3 + random.nextInt(3); // 3-5 uploads
        int maxUploads = numUploads - 1;
        int totalUploads = numKeys * numUploads;
        int counter = (totalUploads % maxUploads == 0) ? (totalUploads / maxUploads)
                : ((totalUploads / maxUploads) + 1);

        print("Number of keys: " + numKeys);
        print("Number of uploads per key: " + numUploads);
        print("Number of mpus per listing: " + maxUploads);

        // Generate some mpus
        TreeMap<String, List<String>> keyUploadIdMap = initiateMpusForMultipleKeys(s3ClientA, accountA, numKeys,
                numUploads, new String());

        Iterator<String> keyIterator = keyUploadIdMap.keySet().iterator();
        String key = keyIterator.next();
        Iterator<String> uploadIdIterator = keyUploadIdMap.get(key).iterator();
        String uploadId = null;

        String nextKeyMarker = null;
        String nextUploadIdMarker = null;
        MultipartUploadListing listing = null;

        for (int i = 1; i <= counter; i++) {
            if (i != counter) {
                listing = listMpu(s3ClientA, accountA, bucketName, nextKeyMarker, nextUploadIdMarker, null,
                        null, maxUploads, true);
                assertTrue(
                        "Expected " + maxUploads + " mpu listings, but got "
                                + listing.getMultipartUploads().size(),
                        maxUploads == listing.getMultipartUploads().size());
            } else {
                listing = listMpu(s3ClientA, accountA, bucketName, nextKeyMarker, nextUploadIdMarker, null,
                        null, maxUploads, false);
                assertTrue(
                        "Expected " + totalUploads + " mpu listings, but got "
                                + listing.getMultipartUploads().size(),
                        totalUploads == listing.getMultipartUploads().size());
            }

            for (MultipartUpload mpu : listing.getMultipartUploads()) {
                if (!uploadIdIterator.hasNext()) {
                    key = keyIterator.next();
                    uploadIdIterator = keyUploadIdMap.get(key).iterator();
                }
                uploadId = uploadIdIterator.next();
                assertTrue("Expected key to be " + key + ", but got " + mpu.getKey(), mpu.getKey().equals(key));
                assertTrue("Expected upload ID to be " + uploadId + ", but got " + mpu.getUploadId(),
                        mpu.getUploadId().equals(uploadId));
                verifyCommonElements(mpu);
                totalUploads--;
            }

            nextKeyMarker = key;
            nextUploadIdMarker = uploadId;
        }
    } catch (AmazonServiceException ase) {
        printException(ase);
        assertThat(false, "Failed to run maxKeys");
    }
}
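
The test above relies on TreeMap's ascending key order matching the key order of the paginated multipart-upload listing. A small sketch of flattening such a map into the expected listing order; the class name and data are hypothetical stand-ins for what initiateMpusForMultipleKeys produces:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class MpuOrderSketch {
    public static void main(String[] args) {
        // Hypothetical data: two keys, two upload IDs each
        TreeMap<String, List<String>> keyUploadIdMap = new TreeMap<>();
        keyUploadIdMap.put("key-b", Arrays.asList("upload-3", "upload-4"));
        keyUploadIdMap.put("key-a", Arrays.asList("upload-1", "upload-2"));

        // Flatten in ascending key order -- the order the paginated
        // listing is expected to walk through
        List<String> expected = new ArrayList<>();
        for (Map.Entry<String, List<String>> e : keyUploadIdMap.entrySet()) {
            for (String id : e.getValue()) {
                expected.add(e.getKey() + ":" + id);
            }
        }
        // [key-a:upload-1, key-a:upload-2, key-b:upload-3, key-b:upload-4]
        System.out.println(expected);
    }
}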