Example usage for java.util TreeMap entrySet

Introduction

On this page you can find example usage for java.util TreeMap entrySet.

Prototype

EntrySet entrySet

Document

Fields initialized to contain an instance of the entry set view the first time this view is requested.
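
Before the project examples, here is a minimal, self-contained sketch (class name and sample data are invented for illustration). It shows the two most common uses of the Set view returned by entrySet(): iteration in key order, and in-place value updates that write through to the backing map.

import java.util.Map;
import java.util.TreeMap;

public class TreeMapEntrySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<String, Integer>();
        scores.put("carol", 2);
        scores.put("alice", 3);
        scores.put("bob", 1);

        // The entry set view iterates in key order: alice, bob, carol
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // The view is backed by the map: setValue updates the TreeMap itself
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            entry.setValue(entry.getValue() * 10);
        }
        System.out.println(scores); // {alice=30, bob=10, carol=20}
    }
}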

Usage

From source file:ca.sqlpower.sqlobject.TestSQLTable.java

public void testGetDerivedInstance() throws Exception {
    SQLTable derivedTable;
    SQLTable table1;
    // Check to make sure it can be added to a playpen-like database
    SQLDatabase pp = new SQLDatabase();
    pp.setPlayPenDatabase(true);
    pp.setParent(new StubSQLObject());
    assertNotNull(table1 = db.getTableByName("REGRESSION_TEST1"));
    derivedTable = table1.createInheritingInstance(pp);

    TreeMap<String, Object> derivedPropertyMap = new TreeMap<String, Object>(BeanUtils.describe(derivedTable));
    TreeMap<String, Object> table1PropertyMap = new TreeMap<String, Object>(BeanUtils.describe(table1));

    table1PropertyMap.remove("parent");
    table1PropertyMap.remove("SQLParent");
    table1PropertyMap.remove("schemaName");
    table1PropertyMap.remove("schema");
    table1PropertyMap.remove("parentDatabase");
    table1PropertyMap.remove("shortDisplayName");
    table1PropertyMap.remove("UUID");
    table1PropertyMap.remove("workspaceContainer");
    table1PropertyMap.remove("runnableDispatcher");
    table1PropertyMap.remove("SPListeners");

    for (Map.Entry<String, Object> property : table1PropertyMap.entrySet()) {
        assertEquals("Property \"" + property.getKey() + "\" has changed;", property.getValue(),
                derivedPropertyMap.get(property.getKey()));
    }

}
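
The pattern above, dumping bean properties into sorted maps and then walking one map's entrySet() against the other, generalizes to any pair of beans. A minimal sketch with a hypothetical bean, assuming commons-beanutils is on the classpath:

import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import org.apache.commons.beanutils.BeanUtils;

public class BeanCompareDemo {
    public static class Point {
        public int getX() { return 1; }
        public int getY() { return 2; }
    }

    public static void main(String[] args) throws Exception {
        // describe() flattens the readable properties into a map; TreeMap sorts the keys
        TreeMap<String, Object> a = new TreeMap<String, Object>(BeanUtils.describe(new Point()));
        TreeMap<String, Object> b = new TreeMap<String, Object>(BeanUtils.describe(new Point()));
        for (Map.Entry<String, Object> e : a.entrySet()) {
            if (!Objects.equals(e.getValue(), b.get(e.getKey()))) {
                System.out.println("property " + e.getKey() + " differs");
            }
        }
        System.out.println("compared " + a.size() + " properties");
    }
}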

From source file:edu.ehu.galan.lite.utils.wikiminer.WikiminnerHelper.java

/**
 * Retrieves the Wikipedia markup and images for the topics mapped in the given document.
 *
 * @param pDoc the document whose mapped topics will be enriched
 * @param maxImagewidth the maximum width for retrieved images
 * @param maxImageheight the maximum height for retrieved images
 */
public void getMarkUpImages(Document pDoc, int maxImagewidth, int maxImageheight) {
    caches.initializeId2TopicMap(pDoc);
    //        for(Document doc: pCorpus.getDocQueue()){
    if (!localMode) {
        HashMap<Integer, Topic> cacheId = caches.getId2TopicMap();
        List<Topic> topicList = pDoc.getTopicList();
        Gson son = new GsonBuilder().create();
        int i = 0;
        GSonMarkUp ex;
        try {
            logger.info("Getting markup from the mapped articles:");
            ProgressTracker tracker = new ProgressTracker((topicList.size() / maxTopics) + 1, "....",
                    this.getClass());
            while (i < topicList.size()) {
                String req = wikiminerUrl + "/services/exploreArticle?ids=";
                String cacheElem = "";
                int sum = 0;
                for (; i < topicList.size(); i++) {
                    int id = (topicList.get(i).getId());
                    cacheElem += id;
                    //                    if(id==18105){
                    //                        System.out.println(pTtopicList.get(i).toString());
                    //                    }
                    req = req + id + ",";
                    sum++;
                    if (sum == maxTopics / 2) {
                        break;
                    }
                }
                req = req.substring(0, req.length() - 1);
                Element elem = cache.get(cacheElem);
                HttpGet getRequest = null;
                if (elem == null) {
                    getRequest = new HttpGet(req + "&wikipedia=" + lang + "&markUp&images&maxImageWidth="
                            + maxImagewidth + "&maxImageHeight=" + maxImageheight + "&responseFormat=JSON");
                    getRequest.addHeader("accept", "application/json");
                    getRequest.addHeader("Accept-Encoding", "gzip");
                    HttpResponse response = httpClient.execute(getRequest);
                    GzipDecompressingEntity entity = new GzipDecompressingEntity(response.getEntity());
                    String jsonText = EntityUtils.toString(entity, StandardCharsets.UTF_8);
                    EntityUtils.consume(entity);
                    ex = son.fromJson(jsonText, GSonMarkUp.class);
                    elem = new Element(cacheElem, ex);
                    cache.put(elem);
                } else {
                    ex = (GSonMarkUp) elem.getObjectValue();
                }
                List<edu.ehu.galan.lite.utils.wikiminer.gsonReaders.markUp.ArticleList> artiList = ex
                        .getArticleList();
                int count = 0;
                for (edu.ehu.galan.lite.utils.wikiminer.gsonReaders.markUp.ArticleList articleList : artiList) {
                    int id = articleList.getId();
                    if (cacheId.containsKey(id)) {
                        Topic top = cacheId.get(id);
                        count++;
                        addInfo2ArticleMarkUp(top, articleList, cacheId);
                        // do not break: if several topics disambiguate to the same article, breaking here causes errors
                    }
                }
                tracker.update();
            }
        } catch (IOException ex1) {
            logger.error(null, ex1);
        }
        //}
    } else {
        if (wikipedia != null) {

            logger.info("Getting Wiki data from the mapped articles:");
            List<Topic> topicList = pDoc.getTopicList();

            List<Integer> validList = new ArrayList<>();
            for (Topic top : topicList) {
                validList.add(top.getId());
            }
            ProgressTracker tracker = new ProgressTracker((validList.size()) + 1, "Getting data....",
                    this.getClass());
            Integer[] ids = validList.toArray(new Integer[validList.size()]);
            List<Integer> nullList = new ArrayList<>();
            List<Integer> invalidList = new ArrayList<>();
            List<Article> articleList = new ArrayList<>();
            List<Category> catList = new ArrayList<>();
            ArticleComparer artComparer = null;
            try {
                artComparer = new ArticleComparer(wikipedia);
            } catch (Exception ex) {
                logger.error("Error getting article comparer for this wikipedia");
            }
            if (artComparer == null) {
                logger.error("No comparisons available for this Wikipedia");
            }

            for (int i = 0; i < ids.length; i++) {
                Integer integer = ids[i];
                org.wikipedia.miner.model.Page pageIds = wikipedia.getPageById(integer);
                if (pageIds == null) {
                    nullList.add(integer);
                    continue; // guard: getType() below would throw an NPE on a null page
                }
                switch (pageIds.getType()) {
                case disambiguation:
                    break;
                case article:
                    articleList.add((Article) pageIds);
                    break;
                default:
                    if (pageIds.getType() == org.wikipedia.miner.model.Page.PageType.category) {
                        catList.add((Category) pageIds);
                    } else {
                        nullList.add(integer);
                    }
                }

            }
            for (Article art : articleList) {
                Topic top = caches.getId2TopicMap().get(art.getId());
                top.setIsIndividual(true);

                String definition = art.getFirstParagraphMarkup();
                top.setSourceDef(definition == null ? "" : definition);
                Article.Label[] labels = art.getLabels();
                int total = 0;
                for (Article.Label lbl : labels) {
                    total += lbl.getLinkOccCount();
                }
                for (Article.Label lbl : labels) {
                    long occ = lbl.getLinkOccCount();
                    if (occ > 0) {
                        top.addLabel(lbl.getText());
                    }
                }
                TreeMap<String, String> translations = art.getTranslations();
                for (Map.Entry<String, String> entry : translations.entrySet()) {
                    top.addTranslation(entry.getKey(), entry.getValue());
                }
                Category[] parents = art.getParentCategories();
                // logger.info("retrieving parents from " + parents.length + " total");
                for (Category parent : parents) {
                    top.addParentCagegory(parent.getId(), parent.getTitle());
                }
                int start = 0;
                int max = 300;
                if (max <= 0) {
                    max = Integer.MAX_VALUE;
                } else {
                    max += start;
                }

                tracker.update();

            }

        }
    }
    caches.clearId2TopicMap();

}

From source file:org.openmicroscopy.shoola.agents.measurement.view.MeasurementViewerModel.java

/**
 * Returns all the figures hosted by the <code>ROIComponent</code>.
 *
 * @return See above.
 */
Collection<ROIFigure> getAllFigures() {
    TreeMap<Long, ROI> rois = roiComponent.getROIMap();
    List<ROIFigure> all = new ArrayList<ROIFigure>();
    if (rois == null)
        return all;
    Iterator i = rois.entrySet().iterator();
    Entry entry;
    ROI roi;
    List<ROIFigure> l;
    while (i.hasNext()) {
        entry = (Entry) i.next();
        roi = (ROI) entry.getValue();
        l = roi.getAllFigures();
        if (l != null && l.size() > 0)
            all.addAll(l);
    }
    return all;
}

From source file:uk.ac.kcl.texthunter.core.MLModelMaker.java

public void writeBestModelProbs(ArrayList<Prob> probValues, File file, TreeMap<String, String> observations)
        throws IOException {
    try (BufferedWriter bw3 = new BufferedWriter(
            new FileWriter(file.getAbsoluteFile() + File.separator + "bestModelProbabilities.tsv"))) {
        System.out.println("outputting probabilites");
        Collections.sort(probValues);
        String probHeader = "annotID\tobservation\t";
        for (Prob result : probValues) {
            probHeader = probHeader + result.getModelID() + "\t";
        }
        bw3.write(probHeader);
        bw3.newLine();
        for (Map.Entry<String, String> entry : observations.entrySet()) {
            String probLine = String.valueOf(entry.getKey()) + "\t" + entry.getValue() + "\t";
            for (Prob result : probValues) {
                probLine = probLine + String.valueOf(result.getMap().get(entry.getKey())) + "\t";
            }
            bw3.write(probLine);
            bw3.newLine();
        }
    }
}

From source file:org.eurocarbdb.application.glycoworkbench.plugin.SpectraPanel.java

public void addIsotopeCurves(TreeMap<Peak, Collection<Annotation>> annotations) {

    if (theDocument.size() == 0)
        return;

    // remove old curves
    removeIsotopeCurves();

    // add curves
    if (annotations != null) {

        // set renderer
        if (show_all_isotopes) {
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.SHAPES));
            thePlot.getRenderer(1).setShape(new Ellipse2D.Double(0, 0, 7, 7));
        } else
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.LINES));

        MSUtils.IsotopeList isotope_list = new MSUtils.IsotopeList(show_all_isotopes);
        for (Map.Entry<Peak, Collection<Annotation>> pa : annotations.entrySet()) {
            Peak p = pa.getKey();
            double[] best_peak = theDocument.getPeakDataAt(current_ind).findNearestPeak(p.getMZ());

            // get compositions
            HashSet<Molecule> compositions = new HashSet<Molecule>();
            for (Annotation a : pa.getValue()) {
                try {
                    compositions.add(a.getFragmentEntry().fragment.computeIon());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            // collect curves for this peak
            HashMap<String, double[][]> all_curves = new HashMap<String, double[][]>();
            for (Molecule m : compositions) {
                try {
                    double[][] data = MSUtils.getIsotopesCurve(1, m, show_all_isotopes);

                    // overlay the distribution with the existing list of isotopes
                    isotope_list.adjust(data, best_peak[0], best_peak[1]);

                    all_curves.put(m.toString(), data);
                } catch (Exception e) {
                    LogUtils.report(e);
                }
            }

            // add average curve for this peak
            if (all_curves.size() > 1) {
                double[][] data = MSUtils.average(all_curves.values(), show_all_isotopes);

                // add the average to the chart
                String name = "average-" + p.getMZ();
                theIsotopesDataset.addSeries(name, data);
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.magenta);
                thePlot.getRenderer(1).setSeriesStroke(theIsotopesDataset.indexOf(name), new BasicStroke(2));

                // add the average to the isotope list
                isotope_list.add(data, false);
            } else if (all_curves.size() == 1) {
                // add the only curve to the isotope list
                isotope_list.add(all_curves.values().iterator().next(), false);
            }

            // add the other curves
            for (Map.Entry<String, double[][]> e : all_curves.entrySet()) {
                String name = e.getKey() + "-" + p.getMZ();
                theIsotopesDataset.addSeries(name, e.getValue());
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.blue);
            }
        }

    }
    updateIntensityAxis();
}

From source file:com.clustercontrol.collect.composite.CollectSettingComposite.java

/**
 * Populates the collector item list from the facilities checked in the facility tree.
 */
public void setCollectorItemCombo() {

    List<String> allItemList = new ArrayList<>();
    // Gather the manager/facility entries checked in the facility tree
    List<String> selectList = this.m_collectGraphView.getFacilityTreeComposite().getCheckedTreeInfo();
    TreeMap<String, List<String>> managerFacilityMap = new TreeMap<>();
    for (String selectStr : selectList) {
        String[] nodeDetail = selectStr.split(SEPARATOR_HASH_EX_HASH);
        if (nodeDetail.length != 0
                && nodeDetail[nodeDetail.length - 1].equals(String.valueOf(FacilityConstant.TYPE_NODE))) {
            String facilityId = nodeDetail[nodeDetail.length - 2];
            String managerName = nodeDetail[0];
            List<String> facilityList = managerFacilityMap.get(managerName);
            if (facilityList == null) {
                facilityList = new ArrayList<String>();
                managerFacilityMap.put(managerName, facilityList);
            }
            if (!facilityList.contains(facilityId)) {
                m_log.debug("????? managerName:" + managerName + ", facilityId:"
                        + facilityId);
                facilityList.add(facilityId);
            }
        }
    }

    for (Map.Entry<String, List<String>> map : managerFacilityMap.entrySet()) {
        String managerName = map.getKey();
        List<String> facilityList = map.getValue();
        // Fetch the collect key (item) list for this manager's facilities
        List<CollectKeyInfoPK> collectKeyInfoList;
        try {
            CollectEndpointWrapper wrapper = CollectEndpointWrapper.getWrapper(managerName);
            collectKeyInfoList = wrapper.getItemCodeList(facilityList);
        } catch (InvalidRole_Exception e) {
            m_log.warn("setCollectorItemCombo() getItemCodeList, " + e.getMessage());
            MessageDialog.openInformation(null, Messages.getString("message"),
                    Messages.getString("message.accesscontrol.16"));
            return;
        } catch (InvalidUserPass_Exception | HinemosUnknown_Exception e) {
            // unexpected error
            m_log.warn("setCollectorItemCombo() getItemCodeList, " + e.getMessage(), e);
            MessageDialog.openInformation(null, Messages.getString("failed"),
                    Messages.getString("message.hinemos.failure.unexpected") + ", "
                            + HinemosMessage.replace(e.getMessage()));
            return;
        }
        // Build the item list from the collect keys registered in the DB
        for (CollectKeyInfoPK collectKeyInfo : collectKeyInfoList) {
            String itemName = collectKeyInfo.getItemName();
            String monitorId = collectKeyInfo.getMonitorId();
            String displayName = collectKeyInfo.getDisplayName();
            if (!allItemList.contains(itemName + SEPARATOR_AT + displayName + SEPARATOR_AT + monitorId)) {
                // store as itemName@displayName@monitorId
                allItemList.add(itemName + SEPARATOR_AT + displayName + SEPARATOR_AT + monitorId);
            }
        }
    }

    // Add each collected item to the item list, avoiding duplicates
    for (String itemCodeName : allItemList) {
        String itemName = HinemosMessage.replace(itemCodeName.split(SEPARATOR_AT)[0]);
        String displayName = itemCodeName.split(SEPARATOR_AT)[1];
        String monitorId = itemCodeName.split(SEPARATOR_AT)[2];
        // append displayName to itemName when it is present
        if (!displayName.equals("") && !itemName.endsWith("[" + displayName + "]")) {
            itemName += "[" + displayName + "]";
        }
        String itemNameStr = itemName + "(" + monitorId + ")";
        if (!(Arrays.asList(m_listCollectorItem.getItems())).contains(itemNameStr)) {
            m_listCollectorItem.add(itemNameStr);
            m_listCollectorItem.setData(itemNameStr, itemCodeName);
        }
    }

    // set the default item selection
    setDefaultItemInfo();
}

From source file:edu.ehu.galan.lite.utils.wikiminer.WikiminnerHelper.java

/**
 * Retrieves Wikipedia data (definitions, labels, translations, parent categories) for the topics mapped in the given document.
 *
 * @param pDoc the document whose mapped topics will be enriched
 * @param links whether to also retrieve in/out links with relatedness scores
 */
public void getData(Document pDoc, boolean links) {
    caches.initializeId2TopicMap(pDoc);
    //        for(Document doc: pCorpus.getDocQueue()){
    if (!localMode) {
        HashMap<Integer, Topic> cacheId = caches.getId2TopicMap();
        List<Topic> topicList = pDoc.getTopicList();
        Gson son = new GsonBuilder().create();
        JsonParser parser = new JsonParser();
        int i = 0;
        List<Integer> invalidList = new ArrayList<>();
        WikiDataArt ex;
        try {
            logger.info("Getting Wiki data from the mapped articles:");
            ProgressTracker tracker = new ProgressTracker((topicList.size() / maxTopics) + 1, "....",
                    this.getClass());
            while (i < topicList.size()) {
                String req = wikiminerUrl + "/services/exploreArticle?ids=";
                String cacheElem = "";
                int sum = 0;
                for (; i < topicList.size(); i++) {
                    int id = (topicList.get(i).getId());
                    cacheElem += id;
                    //                    if(id==18105){
                    //                        System.out.println(pTtopicList.get(i).toString());
                    //                    }
                    req = req + id + ",";
                    sum++;
                    if (sum == maxTopics) {
                        break;
                    }
                }
                req = req.substring(0, req.length() - 1);
                Element elem = cache.get(cacheElem);
                HttpGet getRequest = null;
                if (elem == null) {
                    if (links) {
                        getRequest = new HttpGet(req + "&wikipedia=" + lang
                                + "&parentCategories&translations&definition&labels&outLinks&inLinks&linkRelatedness&responseFormat=JSON&responseFormat=JSON");
                    } else {
                        getRequest = new HttpGet(req + "&wikipedia=" + lang
                                + "&parentCategories&translations&definition&labels&responseFormat=JSON&responseFormat=JSON");
                    }

                    getRequest.addHeader("accept", "application/json");
                    getRequest.addHeader("Accept-Encoding", "gzip");
                    HttpResponse response = httpClient.execute(getRequest);
                    GzipDecompressingEntity entity = new GzipDecompressingEntity(response.getEntity());
                    String jsonText = EntityUtils.toString(entity, StandardCharsets.UTF_8);
                    EntityUtils.consume(entity);
                    ex = son.fromJson(jsonText, WikiDataArt.class);
                    elem = new Element(cacheElem, ex);
                    cache.put(elem);
                } else {
                    ex = (WikiDataArt) elem.getObjectValue();
                }
                List<ArticleList> artiList = ex.getArticleList();
                int count = 0;
                for (ArticleList articleList : artiList) {
                    int id = articleList.getId();
                    if (cacheId.containsKey(id)) {
                        Topic top = cacheId.get(id);
                        count++;
                        addInfo2Article(top, articleList, cacheId);
                        // do not break: if several topics disambiguate to the same article, breaking here causes errors
                    }
                }
                List<Integer> invalids = ex.getInvalidList(); // may contain categories
                for (Integer integer : invalids) {
                    invalidList.add(integer);
                    if (cacheId.containsKey(integer)) {
                        Topic top = cacheId.get(integer);
                        top.addLabel(top.getTopic());
                        top.addLabel(top.getSourceTitle());
                    }
                }
                tracker.update();
            }
            i = 0;

            while (i < invalidList.size()) {
                int sum = 0;
                String req = wikiminerUrl + "/services/exploreCategory?ids=";
                for (; i < invalidList.size(); i++) {

                    int id = invalidList.get(i);
                    //                    if(id==18105){
                    //                        System.out.println(pTtopicList.get(i).toString());
                    //                    }
                    sum++;
                    req = req + id + ",";
                    if (sum == maxTopics) {
                        break;
                    }

                }
                req = req.substring(0, req.length() - 1);
                HttpGet getRequest = new HttpGet(req + "&wikipedia=" + lang
                        + "&parentCategories&translations&definition&labels&responseFormat=JSON");
                getRequest.addHeader("accept", "application/json");
                getRequest.addHeader("Accept-Encoding", "gzip");
                HttpResponse response = httpClient.execute(getRequest);

                //                WikiData ex = son.fromJson(response2String(response), WikiData.class);
                //                List<ArticleList> artiList = ex.getArticleList();
                //                for (ArticleList articleList : artiList) {
                //                    int id = articleList.getId();
                //                    for (Topic topic : topicList) {
                //                        if (topic.getId() == id) {
                //                            addInfo2Article(topic, articleList);
                //                            break;
                //                        }
                //                    }
                //                }
            }
        } catch (IOException ex1) {
            logger.error(null, ex1);
        }
        //}
    } else {
        if (wikipedia != null) {
            logger.info("Getting Wiki data from the mapped articles:");
            List<Topic> topicList = pDoc.getTopicList();

            List<Integer> validList = new ArrayList<>();
            for (Topic top : topicList) {
                validList.add(top.getId());
            }
            ProgressTracker tracker = new ProgressTracker((validList.size()) + 1, "Getting data....",
                    this.getClass());
            Integer[] ids = validList.toArray(new Integer[validList.size()]);
            List<Integer> nullList = new ArrayList<>();
            List<Integer> invalidList = new ArrayList<>();
            List<Article> articleList = new ArrayList<>();
            List<Category> catList = new ArrayList<>();
            ArticleComparer artComparer = null;
            try {
                artComparer = new ArticleComparer(wikipedia);
            } catch (Exception ex) {
                logger.error("Error getting article comparer for this wikipedia");
            }
            if (artComparer == null) {
                logger.error("No comparisons available for this Wikipedia");
            }

            for (int i = 0; i < ids.length; i++) {
                Integer integer = ids[i];
                org.wikipedia.miner.model.Page pageIds = wikipedia.getPageById(integer);
                if (pageIds == null) {
                    nullList.add(integer);
                    continue; // guard: getType() below would throw an NPE on a null page
                }
                switch (pageIds.getType()) {
                case disambiguation:
                    break;
                case article:
                    articleList.add((Article) pageIds);
                    break;
                default:
                    if (pageIds.getType() == org.wikipedia.miner.model.Page.PageType.category) {
                        catList.add((Category) pageIds);
                    } else {
                        nullList.add(integer);
                    }
                }

            }
            for (Article art : articleList) {
                Topic top = caches.getId2TopicMap().get(art.getId());
                top.setIsIndividual(true);

                String definition = art.getFirstParagraphMarkup();
                top.setSourceDef(definition == null ? "" : definition);
                Article.Label[] labels = art.getLabels();
                int total = 0;
                for (Article.Label lbl : labels) {
                    total += lbl.getLinkOccCount();
                }
                for (Article.Label lbl : labels) {
                    long occ = lbl.getLinkOccCount();
                    if (occ > 0) {
                        top.addLabel(lbl.getText());
                    }
                }
                TreeMap<String, String> translations = art.getTranslations();
                for (Map.Entry<String, String> entry : translations.entrySet()) {
                    top.addTranslation(entry.getKey(), entry.getValue());
                }
                Category[] parents = art.getParentCategories();
                // logger.info("retrieving parents from " + parents.length + " total");
                for (Category parent : parents) {
                    top.addParentCagegory(parent.getId(), parent.getTitle());
                }
                int start = 0;
                int max = 300;
                if (max <= 0) {
                    max = Integer.MAX_VALUE;
                } else {
                    max += start;
                }
                if (links) {
                    Article[] linksOut = art.getLinksOut();
                    //logger.info("retrieving out links [" + start + "," + max + "] from " + linksOut.length + " total");
                    for (int i = start; i < max && i < linksOut.length; i++) {
                        if (artComparer != null) {
                            try {
                                top.addLinkOut(linksOut[i].getId(),
                                        artComparer.getRelatedness(art, linksOut[i]));
                            } catch (Exception ex) {
                                //                                logger.debug("error comparing articles" + ex);
                            }
                        }
                    }
                    start = 0;
                    max = 300;
                    if (max <= 0) {
                        max = Integer.MAX_VALUE;
                    } else {
                        max += start;
                    }
                    Article[] linksIn = art.getLinksIn();
                    // logger.info("retrieving in links [" + start + "," + max + "] from " + linksIn.length + " total");
                    for (int i = start; i < max && i < linksIn.length; i++) {
                        if (artComparer != null) {
                            try {
                                top.addLinkIn(linksIn[i].getId(), artComparer.getRelatedness(art, linksIn[i]));
                            } catch (Exception ex) {
                                //    logger.debug("error comparing articles" + ex);
                            }
                        }
                    }
                }
                tracker.update();

            }

        }
    }
    caches.clearId2TopicMap();

}

From source file:org.apache.hadoop.tools.HadoopArchives.java

/**
 * This method writes all the valid top level directories
 * into the srcWriter for indexing. It is a little
 * tricky. Example:
 * for an input with parent path /home/user/ and sources
 * as /home/user/source/dir1, /home/user/source/dir2 - this
 * will output <source, dir, dir1, dir2> (dir means that source is a dir
 * with dir1 and dir2 as children) and <source/dir1, file, null>
 * and <source/dir2, file, null>
 * @param srcWriter the sequence file writer to write the
 * directories to
 * @param paths the source paths provided by the user. They
 * are glob-free, full paths (not relative)
 * @param parentPath the parent path that you want the archives
 * to be relative to. Example: /home/user/dir1 can be archived with
 * parent as /home or /home/user.
 * @throws IOException
 */
private void writeTopLevelDirs(SequenceFile.Writer srcWriter, List<Path> paths, Path parentPath)
        throws IOException {
    //add all the directories 
    List<Path> justDirs = new ArrayList<Path>();
    for (Path p : paths) {
        if (!p.getFileSystem(getConf()).isFile(p)) {
            justDirs.add(new Path(p.toUri().getPath()));
        } else {
            justDirs.add(new Path(p.getParent().toUri().getPath()));
        }
    }
    /* Find all the common parents of paths that are valid archive
     * paths. This is done so that we do not add a common path
     * twice, and so that we only add the valid children of a path that
     * were specified by the user.
     */
    TreeMap<String, HashSet<String>> allpaths = new TreeMap<String, HashSet<String>>();
    /* the largest depth of paths. the max number of times
     * we need to iterate
     */
    Path deepest = largestDepth(paths);
    Path root = new Path(Path.SEPARATOR);
    for (int i = parentPath.depth(); i < deepest.depth(); i++) {
        List<Path> parents = new ArrayList<Path>();
        for (Path p : justDirs) {
            if (p.compareTo(root) == 0) {
                //do nothing
            } else {
                Path parent = p.getParent();
                if (null != parent) {
                    if (allpaths.containsKey(parent.toString())) {
                        HashSet<String> children = allpaths.get(parent.toString());
                        children.add(p.getName());
                    } else {
                        HashSet<String> children = new HashSet<String>();
                        children.add(p.getName());
                        allpaths.put(parent.toString(), children);
                    }
                    parents.add(parent);
                }
            }
        }
        justDirs = parents;
    }
    Set<Map.Entry<String, HashSet<String>>> keyVals = allpaths.entrySet();
    for (Map.Entry<String, HashSet<String>> entry : keyVals) {
        final Path relPath = relPathToRoot(new Path(entry.getKey()), parentPath);
        if (relPath != null) {
            final String[] children = new String[entry.getValue().size()];
            int i = 0;
            for (String child : entry.getValue()) {
                children[i++] = child;
            }
            append(srcWriter, 0L, relPath.toString(), children);
        }
    }
}
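
The get-or-create aggregation above (look up the parent's child set, creating it on first use) followed by a walk over allpaths.entrySet() is the heart of the method. Here is a standalone sketch under hypothetical inputs, using computeIfAbsent (Java 8+) as a compact equivalent of the containsKey/put sequence:

import java.util.HashSet;
import java.util.Map;
import java.util.TreeMap;

public class ParentChildIndexDemo {
    public static void main(String[] args) {
        // hypothetical glob-free source paths, mirroring the javadoc example
        String[] dirs = { "/home/user/source/dir1", "/home/user/source/dir2" };
        TreeMap<String, HashSet<String>> allpaths = new TreeMap<String, HashSet<String>>();
        for (String d : dirs) {
            int cut = d.lastIndexOf('/');
            // get-or-create the child set for this parent, then record the child
            allpaths.computeIfAbsent(d.substring(0, cut), k -> new HashSet<String>()).add(d.substring(cut + 1));
        }
        // entrySet() yields the parents in sorted path order
        for (Map.Entry<String, HashSet<String>> e : allpaths.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
        // prints, e.g.: /home/user/source -> [dir1, dir2]
    }
}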

From source file:org.openmicroscopy.shoola.agents.measurement.view.MeasurementViewerModel.java

/**
 * Sets the server ROIs.
 *
 * @param rois The collection of Rois.
 * @return See above.
 * @throws ROICreationException
 * @throws NoSuchROIException
 */
List<DataObject> setServerROI(Collection rois) throws ROICreationException, NoSuchROIException {
    List<DataObject> nodes = new ArrayList<DataObject>();
    measurementResults = rois;
    state = MeasurementViewer.READY;
    List<ROI> roiList = new ArrayList<ROI>();
    Iterator r = rois.iterator();
    ROIResult result;
    long userID = MeasurementAgent.getUserDetails().getId();
    while (r.hasNext()) {
        result = (ROIResult) r.next();
        roiList.addAll(roiComponent.loadROI(result.getFileID(), result.getROIs(), userID));
    }
    if (roiList == null)
        return nodes;
    Iterator<ROI> i = roiList.iterator();
    ROI roi;
    TreeMap<Coord3D, ROIShape> shapeList;
    Iterator j;
    ROIShape shape;
    Coord3D coord;
    int sizeZ = pixels.getSizeZ();
    int sizeT = pixels.getSizeT();
    Entry entry;
    int c;
    ROIFigure f;
    while (i.hasNext()) {
        roi = i.next();
        shapeList = roi.getShapes();
        j = shapeList.entrySet().iterator();
        while (j.hasNext()) {
            entry = (Entry) j.next();
            shape = (ROIShape) entry.getValue();
            coord = shape.getCoord3D();
            if (coord.getTimePoint() < sizeT && coord.getZSection() < sizeZ) {
                c = coord.getChannel();
                f = shape.getFigure();
                if (shape.getData() != null) {
                    nodes.add(shape.getData());
                }
                if (c >= 0 && f.isVisible())
                    f.setVisible(isChannelActive(c));
            }
        }
    }
    checkIfHasROIToDelete();
    return nodes;
}

From source file:org.jahia.ajax.gwt.helper.PublicationHelper.java

public Map<PublicationWorkflow, WorkflowDefinition> createPublicationWorkflows(
        List<GWTJahiaPublicationInfo> all) {
    final TreeMap<String, List<GWTJahiaPublicationInfo>> infosListByWorkflowGroup = new TreeMap<String, List<GWTJahiaPublicationInfo>>();

    Map<String, String> workflowGroupToKey = new HashMap<String, String>();
    List<String> keys = new ArrayList<String>();

    for (GWTJahiaPublicationInfo info : all) {
        String workflowGroup = info.getWorkflowGroup();
        if (!infosListByWorkflowGroup.containsKey(workflowGroup)) {
            infosListByWorkflowGroup.put(workflowGroup, new ArrayList<GWTJahiaPublicationInfo>());
        }
        infosListByWorkflowGroup.get(workflowGroup).add(info);
        if (info.getWorkflowDefinition() != null) {
            workflowGroupToKey.put(info.getWorkflowGroup(), info.getWorkflowDefinition());
            if (!keys.contains(info.getWorkflowDefinition())) {
                keys.add(info.getWorkflowDefinition());
            }
        }
    }

    Map<PublicationWorkflow, WorkflowDefinition> result = new LinkedHashMap<PublicationWorkflow, WorkflowDefinition>();

    Map<String, WorkflowDefinition> workflows = new HashMap<String, WorkflowDefinition>();
    for (String wf : keys) {
        WorkflowDefinition w = workflowService.getWorkflowDefinition(StringUtils.substringBefore(wf, ":"),
                StringUtils.substringAfter(wf, ":"), null);
        workflows.put(wf, w);
    }

    for (Map.Entry<String, List<GWTJahiaPublicationInfo>> entry : infosListByWorkflowGroup.entrySet()) {
        result.put(new PublicationWorkflow(entry.getValue()),
                workflows.get(workflowGroupToKey.get(entry.getKey())));
    }

    return result;
}