Example usage for java.util TreeSet iterator

List of usage examples for java.util TreeSet iterator

Introduction

On this page you can find example usages of java.util.TreeSet.iterator().

Prototype

public Iterator<E> iterator() 

Source Link

Document

Returns an iterator over the elements in this set in ascending order.

Usage

From source file:org.paxle.tools.charts.impl.gui.ChartServlet.java

/**
 * Merges the time periods of every non-empty series into one sorted set.
 *
 * @param seriesList the series whose time periods should be merged
 * @return an iterator over the union of all time periods, in ascending order
 */
@SuppressWarnings("unchecked")
public Iterator<RegularTimePeriod> getMergedPeriods(List<TimeSeries> seriesList) {
    final TreeSet<RegularTimePeriod> mergedPeriods = new TreeSet<RegularTimePeriod>();

    for (TimeSeries timeSeries : seriesList) {
        // Empty series contribute nothing.
        if (timeSeries.getItemCount() > 0) {
            mergedPeriods.addAll(timeSeries.getTimePeriods());
        }
    }

    return mergedPeriods.iterator();
}

From source file:gemlite.shell.admin.dao.AdminDao.java

/**
 * Checks, for every host, whether any primary bucket also appears as a
 * redundant bucket on the same host, and appends a report to {@code sb}.
 *
 * @param ipSet   hosts to inspect
 * @param nodeMap node names per host
 * @param data    bucket tables keyed by (primary|redundant) prefix + host + node
 * @param sb      receives the report text
 */
private void checkPr(TreeSet<String> ipSet, HashMap<String, Set<String>> nodeMap,
        HashMap<String, HashMap<Integer, String>> data, StringBuilder sb) {
    StringBuilder tmp = new StringBuilder();
    // Walk every host and compare its primary buckets against its redundant buckets.
    for (String host : ipSet) {
        // Primary-side nodes on this host.
        for (String pnode : nodeMap.get(host)) {
            String phostAndNode = host + pnode;
            String pKey = primary + phostAndNode;
            HashMap<Integer, String> pMap = data.get(pKey);
            // Guard against missing primary data — the original only
            // null-checked the redundant side and NPE'd on pMap.keySet().
            if (pMap == null || pMap.isEmpty())
                continue;
            // Redundant-side nodes on the same host.
            for (String rnode : nodeMap.get(host)) {
                String rhostAndNode = host + rnode;
                String rKey = redundant + rhostAndNode;
                HashMap<Integer, String> rMap = data.get(rKey);
                if (rMap == null || rMap.isEmpty())
                    continue;
                // A bucketId present in both maps means a primary bucket and
                // its redundant copy live on the same host.
                for (Integer bucketId : pMap.keySet()) {
                    if (rMap.containsKey(bucketId)) {
                        tmp.append("primary bucket:" + phostAndNode + "-" + bucketId + " exist in redundant:"
                                + rhostAndNode).append("\n");
                    }
                }
            }
        }
    }

    if (tmp.length() > 0) {
        sb.append(tmp.toString());
    } else {
        sb.append("No primary and redundant bucket exist in the same host!");
    }
}

From source file:io.apicurio.hub.api.rest.impl.DesignsResourceTest.java

@Test
public void testListDesigns()
        throws ServerError, AlreadyExistsException, NotFoundException, ApiValidationException {
    // Import two sample API designs.
    ImportApiDesign petStore = new ImportApiDesign();
    petStore.setUrl("https://github.com/Apicurio/api-samples/blob/master/pet-store/pet-store.json");
    resource.importDesign(petStore);

    ImportApiDesign apimanRls = new ImportApiDesign();
    apimanRls.setUrl("https://github.com/Apicurio/api-samples/blob/master/apiman-rls/apiman-rls.json");
    resource.importDesign(apimanRls);

    // Both designs should now be listed.
    Collection<ApiDesign> apis = resource.listDesigns();
    Assert.assertNotNull(apis);
    Assert.assertEquals(2, apis.size());

    // Order the designs by name so the assertions below are deterministic.
    TreeSet<ApiDesign> sortedApis = new TreeSet<>(Comparator.comparing(ApiDesign::getName));
    sortedApis.addAll(apis);

    Iterator<ApiDesign> sortedIter = sortedApis.iterator();
    ApiDesign firstDesign = sortedIter.next();
    ApiDesign secondDesign = sortedIter.next();

    Assert.assertNotNull(firstDesign);
    Assert.assertNotNull(secondDesign);

    // Alphabetically, apiman-rls.json sorts before pet-store.json.
    Assert.assertEquals("apiman-rls.json", firstDesign.getName());
    Assert.assertEquals("pet-store.json", secondDesign.getName());

    // Verify the expected GitHub interactions were recorded, in order.
    String ghLog = github.auditLog();
    Assert.assertNotNull(ghLog);
    Assert.assertEquals("---\n"
            + "validateResourceExists::https://github.com/Apicurio/api-samples/blob/master/pet-store/pet-store.json\n"
            + "getResourceContent::https://github.com/Apicurio/api-samples/blob/master/pet-store/pet-store.json\n"
            + "validateResourceExists::https://github.com/Apicurio/api-samples/blob/master/apiman-rls/apiman-rls.json\n"
            + "getResourceContent::https://github.com/Apicurio/api-samples/blob/master/apiman-rls/apiman-rls.json\n"
            + "---", ghLog);
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 * Writes the CAS optimisation constraints for ISARHP/ISARCEP to
 * OptimisationConstraints_CAS.csv in the given output directory.
 *
 * @param a_OutputDir_String directory in which the CSV file is created
 * @throws Exception if the file cannot be written
 */
public void writeCASOptimisationConstraintsISARHP_ISARCEP(String a_OutputDir_String) throws Exception {
    File optimisationConstraints_File = new File(a_OutputDir_String, "OptimisationConstraints_CAS.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_File);
    // try/finally guarantees the stream is closed even when a write throws;
    // the original leaked the stream on any exception.
    try {
        OutputDataHandler_OptimisationConstraints.writeISARHP_ISARCEPHeader(a_FileOutputStream);
        a_FileOutputStream.flush();
        TreeSet a_OACodes_TreeSet = _CASDataHandler.getOACodes_TreeSet();
        Iterator a_OACodes_Iterator = a_OACodes_TreeSet.iterator();
        while (a_OACodes_Iterator.hasNext()) {
            String a_OACode = (String) a_OACodes_Iterator.next();
            CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OACode);
            // fitnessCounts[0] holds the constraint counts for this OA code.
            Object[] fitnessCounts = GeneticAlgorithm_ISARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
            OutputDataHandler_OptimisationConstraints
                    .writeISARHP_ISARCEP((HashMap<String, Integer>) fitnessCounts[0], a_OACode,
                            a_FileOutputStream);
        }
    } finally {
        a_FileOutputStream.close();
    }
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 * Writes the CAS optimisation constraints for HSARHP/ISARCEP to
 * OptimisationConstraints_CAS.csv in the given output directory.
 *
 * @param a_OutputDir_String directory in which the CSV file is created
 * @throws Exception if the file cannot be written
 */
public void writeCASOptimisationConstraintsHSARHP_ISARCEP(String a_OutputDir_String) throws Exception {
    File optimisationConstraints_File = new File(a_OutputDir_String, "OptimisationConstraints_CAS.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_File);
    // try/finally guarantees the stream is closed even when a write throws;
    // the original leaked the stream on any exception.
    try {
        OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
        a_FileOutputStream.flush();
        TreeSet a_OACodes_TreeSet = _CASDataHandler.getOACodes_TreeSet();
        Iterator a_OACodes_Iterator = a_OACodes_TreeSet.iterator();
        while (a_OACodes_Iterator.hasNext()) {
            String a_OACode = (String) a_OACodes_Iterator.next();
            CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OACode);
            // fitnessCounts[0] holds the constraint counts for this OA code.
            Object[] fitnessCounts = GeneticAlgorithm_HSARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
            OutputDataHandler_OptimisationConstraints
                    .writeHSARHP_ISARCEP((HashMap<String, Integer>) fitnessCounts[0], a_OACode,
                            a_FileOutputStream);
        }
    } finally {
        a_FileOutputStream.close();
    }
}

From source file:org.apache.myfaces.custom.schedule.AbstractCompactScheduleRenderer.java

/**
 * <p>
 * Draw the schedule entries in the specified day cell
 * </p>
 *
 * @param context
 *            the FacesContext
 * @param schedule
 *            the schedule
 * @param day
 *            the day
 * @param writer
 *            the ResponseWriter
 *
 * @throws IOException
 *             when the entries could not be drawn
 */
protected void writeEntries(FacesContext context, HtmlSchedule schedule, ScheduleDay day, ResponseWriter writer)
        throws IOException {
    final String clientId = schedule.getClientId(context);
    final FormInfo parentFormInfo = RendererUtils.findNestingForm(schedule, context);
    final String formId = parentFormInfo == null ? null : parentFormInfo.getFormName();
    // TreeSet sorts the day's entries using the renderer's comparator.
    final TreeSet entrySet = new TreeSet(comparator);

    for (Iterator entryIterator = day.iterator(); entryIterator.hasNext();) {
        ScheduleEntry entry = (ScheduleEntry) entryIterator.next();
        entrySet.add(entry);
    }

    // Only emit markup when the day actually has entries.
    if (entrySet.size() > 0) {
        writer.startElement(HTML.TABLE_ELEM, schedule);
        writer.writeAttribute(HTML.CELLPADDING_ATTR, "0", null);
        writer.writeAttribute(HTML.CELLSPACING_ATTR, "0", null);
        writer.writeAttribute(HTML.STYLE_ATTR, "width: 100%;", null);

        // One table row per entry, in sorted order.
        for (Iterator entryIterator = entrySet.iterator(); entryIterator.hasNext();) {
            ScheduleEntry entry = (ScheduleEntry) entryIterator.next();
            writer.startElement(HTML.TR_ELEM, schedule);
            writer.startElement(HTML.TD_ELEM, schedule);

            if (isSelected(schedule, entry)) {
                writer.writeAttribute(HTML.CLASS_ATTR, getStyleClass(schedule, "selected"), null);
            }

            //compose the CSS style for the entry box
            StringBuffer entryStyle = new StringBuffer();
            entryStyle.append("width: 100%;");
            String entryColor = getEntryRenderer(schedule).getColor(context, schedule, entry,
                    isSelected(schedule, entry));
            // Selected entries get the entry color as background and border.
            if (isSelected(schedule, entry) && entryColor != null) {
                entryStyle.append(" background-color: ");
                entryStyle.append(entryColor);
                entryStyle.append(";");
                entryStyle.append(" border-color: ");
                entryStyle.append(entryColor);
                entryStyle.append(";");
            }

            writer.writeAttribute(HTML.STYLE_ATTR, entryStyle.toString(), null);

            // draw the tooltip
            if (schedule.isTooltip()) {
                getEntryRenderer(schedule).renderToolTip(context, writer, schedule, entry,
                        isSelected(schedule, entry));
            }

            // Unselected entries in an editable schedule are wrapped in an
            // anchor that fires the selection JavaScript on click.
            if (!isSelected(schedule, entry) && !schedule.isReadonly()) {
                writer.startElement(HTML.ANCHOR_ELEM, schedule);
                writer.writeAttribute(HTML.HREF_ATTR, "#", null);

                writer.writeAttribute(HTML.ONCLICK_ATTR,
                        "fireEntrySelected('" + formId + "', '" + clientId + "', '" + entry.getId() + "');",
                        null);
            }

            // draw the content
            getEntryRenderer(schedule).renderContent(context, writer, schedule, day, entry, true,
                    isSelected(schedule, entry));

            // Close the anchor opened above, if any.
            if (!isSelected(schedule, entry) && !schedule.isReadonly()) {
                writer.endElement(HTML.ANCHOR_ELEM);
            }

            writer.endElement(HTML.TD_ELEM);
            writer.endElement(HTML.TR_ELEM);
        }
        writer.endElement(HTML.TABLE_ELEM);
    }
}

From source file:org.processmining.analysis.performance.dottedchart.model.DottedChartModel.java

/**
 * Recomputes the per-key and overall descriptive statistics from the
 * time differences between consecutive kept events of each item set.
 */
public void calculateStatisticsLogical() {
    String key = null;
    AbstractLogUnit item, itemOld;

    // Slot 0 holds the overall statistics; per-key slots start at index 1.
    DescriptiveStatistics overallDS = timeStatistics.get(0);
    overallDS.clear();
    overallStatistics.clear();

    int index = -1;
    for (Iterator itSets = getItemMap().keySet().iterator(); itSets.hasNext();) {
        key = (String) itSets.next();
        index++;
        DescriptiveStatistics tempDS = timeStatistics.get(index + 1);
        tempDS.clear();
        // Skip keys that are not among the selected instances.
        if (typeHashMap.equals(ST_INST) && !instanceIDs.contains(key))
            continue;
        LogUnitList tempLogUnitList = (LogUnitList) getItemMap().get(key);

        itemOld = null;
        // iterate through items in their natural (sorted) order
        ArrayList<AbstractLogUnit> abst = tempLogUnitList.getEvents();
        TreeSet treeSet = new TreeSet<AbstractLogUnit>(abst);

        for (Iterator itItm = treeSet.iterator(); itItm.hasNext();) {
            item = (AbstractLogUnit) itItm.next();
            // Skip events filtered out by event type or instance id.
            if (eventTypeToKeep != null && (!eventTypeToKeep.contains(item.getType())
                    || !instanceIDs.contains(item.getProcessInstance().getName())))
                continue;
            if (itemOld == null) {
                itemOld = item;
            } else {
                // Elapsed time between two consecutive kept events.
                double temp = (double) item.getCurrentTimeStamp().getTime()
                        - (double) itemOld.getCurrentTimeStamp().getTime();
                overallStatistics.addValue(temp);
                tempDS.addValue(temp);
                itemOld = item;
            }
        }
        // Overall value for this key: the time span between its boundary
        // timestamps, or 0 when either boundary is missing.
        if (tempLogUnitList.getRightBoundaryTimestamp(eventTypeToKeep, instanceIDs) == null
                || tempLogUnitList.getLeftBoundaryTimestamp(eventTypeToKeep, instanceIDs) == null)
            overallDS.addValue(0);
        else
            overallDS.addValue((tempLogUnitList.getRightBoundaryTimestamp(eventTypeToKeep, instanceIDs)
                    .getTime()
                    - tempLogUnitList.getLeftBoundaryTimestamp(eventTypeToKeep, instanceIDs).getTime()));
    }
}

From source file:mi.RankInfo.java

/**
 * For every reference point, computes the squared distance to all data
 * points, orders them ascending, and records the top-{@code nTop} ranks
 * into {@code rinfos}.
 */
void computeDistRanksFromRef() {
    for (int j = 0; j < numRefPoints; j++) {
        FloatByteRcd b = this.r[j];
        // TreeSet keeps DistInfo objects ordered, smallest distance first.
        TreeSet<DistInfo> nnList = new TreeSet<>();

        System.out.println("Computing distance info for ref-point: " + j);

        for (int i = 0; i < numPoints; i++) {
            FloatByteRcd a = this.o[i];
            float dist = computeDistSquared(a.getVec(), b.getVec());
            nnList.add(new DistInfo(i, j, dist));
        }

        // The original gated this message on "i % 1000 == 0" AFTER the loop,
        // where i always equals numPoints — a dead leftover check; print once
        // per reference point instead.
        System.out.println("Trimming distance info for point: " + j);

        // Record the best-k ranks; stop early when fewer than nTop points
        // exist (the original could throw NoSuchElementException here).
        Iterator<DistInfo> iter = nnList.iterator();
        for (int k = 0; k < nTop && iter.hasNext(); k++) {
            DistInfo nnInfo = iter.next();
            rinfos[nnInfo.refId].add(new RankInfo(nnInfo.id, k + 1));
        }
    }
}

From source file:org.processmining.analysis.performance.dottedchart.model.DottedChartModel.java

/**
 * Recomputes the per-key and overall descriptive statistics from the
 * time differences between consecutive kept events of each item set.
 */
public void calculateStatistics() {
    String key = null;
    AbstractLogUnit item = null;
    AbstractLogUnit itemOld;

    // get the descriptiveStatistics object
    // Slot 0 holds the overall statistics; per-key slots start at index 1.
    DescriptiveStatistics overallDS = getTimeStatistics().get(0);
    overallDS.clear();
    overallStatistics.clear();

    // iterate through sets
    int index = -1;
    for (Iterator itSets = getItemMap().keySet().iterator(); itSets.hasNext();) {
        index++;
        key = (String) itSets.next();
        DescriptiveStatistics tempDS = getTimeStatistics().get(index + 1);
        tempDS.clear();

        // Skip keys that are not among the selected instances.
        if (typeHashMap.equals(ST_INST) && !instanceIDs.contains(key))
            continue;

        LogUnitList tempLogUnit = (LogUnitList) getItemMap().get(key);

        itemOld = null;
        // iterate through items in their natural (sorted) order
        ArrayList<AbstractLogUnit> abst = ((LogUnitList) getItemMap().get(key)).getEvents();
        TreeSet treeSet = new TreeSet<AbstractLogUnit>(abst);

        // k counts visited items so the padding loop below can fill the
        // remainder (duplicates dropped by the TreeSet) with zeros.
        int k = 0;
        for (Iterator itItm = treeSet.iterator(); itItm.hasNext();) {
            k++;
            item = (AbstractLogUnit) itItm.next();

            // Skip events filtered out by event type or instance id.
            if (eventTypeToKeep != null && (!eventTypeToKeep.contains(item.getType())
                    || !instanceIDs.contains(item.getProcessInstance().getName())))
                continue;
            if (itemOld == null) {
                itemOld = item;
            } else {
                // Elapsed time between two consecutive kept events.
                double temp = (double) item.getCurrentTimeStamp().getTime()
                        - (double) itemOld.getCurrentTimeStamp().getTime();
                tempDS.addValue(temp);
                overallStatistics.addValue(temp);
                itemOld = item;
            }
        }
        // Pad with zeros so tempDS has one value per original event.
        for (int j = k; j < abst.size(); j++)
            tempDS.addValue(0);

        // Overall value for this key: the time span between its boundary
        // timestamps, or 0 when either boundary is missing.
        if (tempLogUnit.getRightBoundaryTimestamp(eventTypeToKeep, instanceIDs) == null
                || tempLogUnit.getLeftBoundaryTimestamp(eventTypeToKeep, instanceIDs) == null)
            overallDS.addValue(0);
        else
            overallDS.addValue((tempLogUnit.getRightBoundaryTimestamp(eventTypeToKeep, instanceIDs).getTime()
                    - tempLogUnit.getLeftBoundaryTimestamp(eventTypeToKeep, instanceIDs).getTime()));
    }

}

From source file:org.apache.hadoop.hdfs.server.namenode.NNStorageRetentionManager.java

/**
 * Delete old OIV fsimages. Since the target dir is not a full blown
 * storage directory, we simply list and keep the latest ones. For the
 * same reason, no storage inspector is used.
 *
 * @param dir  directory containing the legacy OIV image files
 * @param txid unused; retained for interface compatibility
 */
void purgeOldLegacyOIVImages(String dir, long txid) {
    File oivImageDir = new File(dir);
    final String oivImagePrefix = NameNodeFile.IMAGE_LEGACY_OIV.getName();
    String filesInStorage[];

    // Get the listing
    filesInStorage = oivImageDir.list(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.matches(oivImagePrefix + "_(\\d+)");
        }
    });

    // File.list() returns null when the directory does not exist or an I/O
    // error occurs; treat that as "nothing to do" rather than NPE-ing.
    // Also check whether there is any work to do.
    if (filesInStorage == null || filesInStorage.length <= numCheckpointsToRetain) {
        return;
    }

    // Create a sorted list of txids from the file names.
    TreeSet<Long> sortedTxIds = new TreeSet<Long>();
    for (String fName : filesInStorage) {
        // Extract the transaction id from the file name.
        long fTxId;
        try {
            fTxId = Long.parseLong(fName.substring(oivImagePrefix.length() + 1));
        } catch (NumberFormatException nfe) {
            // This should not happen since we have already filtered it.
            // Log and continue.
            LOG.warn("Invalid file name. Skipping " + fName);
            continue;
        }
        sortedTxIds.add(Long.valueOf(fTxId));
    }

    // Delete the oldest images (ascending iteration order) until only
    // numCheckpointsToRetain remain.
    int numFilesToDelete = sortedTxIds.size() - numCheckpointsToRetain;
    Iterator<Long> iter = sortedTxIds.iterator();
    while (numFilesToDelete > 0 && iter.hasNext()) {
        long txIdVal = iter.next().longValue();
        String fileName = NNStorage.getLegacyOIVImageFileName(txIdVal);
        LOG.info("Deleting " + fileName);
        File fileToDelete = new File(oivImageDir, fileName);
        if (!fileToDelete.delete()) {
            // deletion failed.
            LOG.warn("Failed to delete image file: " + fileToDelete);
        }
        numFilesToDelete--;
    }
}