Example usage for java.util TreeSet isEmpty

List of usage examples for java.util TreeSet isEmpty

Introduction

On this page you can find example usage of java.util.TreeSet.isEmpty().

Prototype

public boolean isEmpty() 

Source Link

Document

Returns true if this set contains no elements.

Usage

From source file:org.unitime.timetable.onlinesectioning.OnlineSectioningServerImpl.java

@Override
public void remove(Offering offering) {
    // Exclusive lock: this mutates several shared index maps together.
    iLock.writeLock().lock();
    try {
        // Drop every course of the offering from the id and name indexes.
        for (Course course : offering.getCourses()) {
            CourseInfo info = iCourseForId.get(course.getId());
            if (info != null) {
                TreeSet<CourseInfo> sameName = iCourseForName.get(info.toString());
                if (sameName != null) {
                    sameName.remove(info);
                    if (sameName.isEmpty()) {
                        // No course left under this name; drop the name bucket.
                        iCourseForName.remove(info.toString());
                    } else if (sameName.size() == 1) {
                        // Exactly one course remains: its name is unique again.
                        for (CourseInfo remaining : sameName) {
                            remaining.setHasUniqueName(true);
                        }
                    }
                }
                iCourseForId.remove(info.getUniqueId());
                iCourses.remove(info);
            }
            iCourseTable.remove(course.getId());
        }
        iOfferingTable.remove(offering.getId());
        // Remove all sections of the offering and unassign affected enrollments.
        for (Config config : offering.getConfigs()) {
            for (Subpart subpart : config.getSubparts()) {
                for (Section section : subpart.getSections()) {
                    iClassTable.remove(section.getId());
                }
            }
            // Copy first: unassigning mutates the enrollment collection.
            for (Enrollment enrollment : new ArrayList<Enrollment>(config.getEnrollments())) {
                enrollment.variable().unassign(0);
            }
        }
    } finally {
        iLock.writeLock().unlock();
    }
}

From source file:org.apache.lens.cube.parse.StorageCandidate.java

/**
 * Gets FactPartitions for the given fact using the following logic:
 *
 * 1. Find the max update interval that will be used for the query. Let's assume the time
 * range is 15 Sep to 15 Dec and the fact has storages with update periods MONTHLY, DAILY, HOURLY.
 * In this case the data for [15 Sep - 1 Oct) U [1 Dec - 15 Dec) will be answered by DAILY partitions
 * and [1 Oct - 1 Dec) will be answered by MONTHLY partitions. The max interval for this query will be MONTHLY.
 *
 * 2. Prune storages that do not fall in the query's time range.
 * {@link org.apache.lens.cube.metadata.CubeMetastoreClient#isStorageTableCandidateForRange(String, Date, Date)}
 *
 * 3. Iterate over the max interval. In our case it will give two months, Oct and Nov. Find partitions for
 * these two months and check validity of FactPartitions for Oct and Nov
 * via {@link #updatePartitionStorage(FactPartition)}.
 * If a partition is missing, try getting partitions for the time range from the other update periods
 * (DAILY, HOURLY). This is achieved by calling getPartitions() recursively but passing only the
 * remaining update periods (DAILY, HOURLY).
 *
 * 4. If the monthly partitions are found, check for look-ahead partitions and call getPartitions
 * recursively for the remaining time intervals, i.e. [15 Sep - 1 Oct) and [1 Dec - 15 Dec).
 *
 * TODO union : Move this into util.
 *
 * @param fromDate            start (inclusive) of the time range to cover
 * @param toDate              end (exclusive) of the time range to cover
 * @param partCol             partition column being resolved
 * @param partitions          output set that receives the resolved partitions
 * @param updatePeriods       candidate update periods; the coarsest applicable one is tried first
 * @param addNonExistingParts whether missing partitions are recorded (and, unless
 *                            {@code failOnPartialData}, added to {@code partitions})
 * @param failOnPartialData   when true, missing partitions are never added to {@code partitions}
 * @param missingPartitions   accumulates partitions that could not be found in any storage
 * @return true when the whole range could be covered, false otherwise
 * @throws LensException on metastore access errors
 */
private boolean getPartitions(Date fromDate, Date toDate, String partCol, Set<FactPartition> partitions,
        TreeSet<UpdatePeriod> updatePeriods, boolean addNonExistingParts, boolean failOnPartialData,
        PartitionRangesForPartitionColumns missingPartitions) throws LensException {
    // Empty (or inverted) range: trivially covered.
    if (fromDate.equals(toDate) || fromDate.after(toDate)) {
        return true;
    }
    // No update periods left to try: the range cannot be covered.
    if (updatePeriods == null || updatePeriods.isEmpty()) {
        return false;
    }

    UpdatePeriod maxInterval = CubeFactTable.maxIntervalInRange(fromDate, toDate, updatePeriods);
    if (maxInterval == null) {
        log.info("No max interval for range: {} to {}", fromDate, toDate);
        return false;
    }

    // Continuous facts with a BETWEEN-style range writer only need the two boundary partitions.
    if (maxInterval == UpdatePeriod.CONTINUOUS
            && cubeQueryContext.getRangeWriter().getClass().equals(BetweenTimeRangeWriter.class)) {
        FactPartition part = new FactPartition(partCol, fromDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        part = new FactPartition(partCol, toDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        this.participatingUpdatePeriods.add(maxInterval);
        log.info("Added continuous fact partition for storage table {}", storageName);
        return true;
    }

    if (!getCubeMetastoreClient().partColExists(this.getFact(), storageName, partCol)) {
        log.info("{} does not exist in {}", partCol, name);
        return false;
    }

    Date maxIntervalStorageTblStartDate = getStorageTableStartDate(maxInterval);
    Date maxIntervalStorageTblEndDate = getStorageTableEndDate(maxInterval);

    // Update periods to fall back on when maxInterval cannot answer (part of) the range.
    TreeSet<UpdatePeriod> remainingIntervals = new TreeSet<>(updatePeriods);
    remainingIntervals.remove(maxInterval);
    if (!isCandidatePartiallyValidForTimeRange(maxIntervalStorageTblStartDate, maxIntervalStorageTblEndDate,
            fromDate, toDate)) {
        //Check the time range in remainingIntervals as maxInterval is not useful
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    // Align the range to maxInterval boundaries, clamped to the storage table's valid window.
    Date ceilFromDate = DateUtil.getCeilDate(
            fromDate.after(maxIntervalStorageTblStartDate) ? fromDate : maxIntervalStorageTblStartDate,
            maxInterval);
    Date floorToDate = DateUtil.getFloorDate(
            toDate.before(maxIntervalStorageTblEndDate) ? toDate : maxIntervalStorageTblEndDate, maxInterval);
    // Aligned range collapsed to nothing: retry the whole range with finer periods.
    if (ceilFromDate.equals(floorToDate) || floorToDate.before(ceilFromDate)) {
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    int lookAheadNumParts = getConf().getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(maxInterval),
            CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, maxInterval, 1).iterator();
    // add partitions from ceilFrom to floorTo
    while (iter.hasNext()) {
        Date dt = iter.next();
        Date nextDt = iter.peekNext();
        FactPartition part = new FactPartition(partCol, dt, maxInterval, null, partWhereClauseFormat);
        updatePartitionStorage(part);
        log.debug("Storage tables containing Partition {} are: {}", part, part.getStorageTables());
        if (part.isFound()) {
            log.debug("Adding existing partition {}", part);
            partitions.add(part);
            this.participatingUpdatePeriods.add(maxInterval);
            log.debug("Looking for look ahead process time partitions for {}", part);
            if (processTimePartCol == null) {
                log.debug("processTimePartCol is null");
            } else if (partCol.equals(processTimePartCol)) {
                log.debug("part column is process time col");
            } else if (updatePeriods.first().equals(maxInterval)) {
                log.debug("Update period is the least update period");
            } else if ((iter.getNumIters() - iter.getCounter()) > lookAheadNumParts) {
                // see if this is the part of the last-n look ahead partitions
                log.debug("Not a look ahead partition");
            } else {
                log.debug("Looking for look ahead process time partitions for {}", part);
                // check if finer partitions are required: they are required only when
                // no partitions from the look-ahead process time are present
                TimeRange.Iterable.Iterator processTimeIter = TimeRange
                        .iterable(nextDt, lookAheadNumParts, maxInterval, 1).iterator();
                while (processTimeIter.hasNext()) {
                    Date pdt = processTimeIter.next();
                    Date nextPdt = processTimeIter.peekNext();
                    FactPartition processTimePartition = new FactPartition(processTimePartCol, pdt, maxInterval,
                            null, partWhereClauseFormat);
                    updatePartitionStorage(processTimePartition);
                    if (processTimePartition.isFound()) {
                        log.debug("Finer parts not required for look-ahead partition :{}", part);
                    } else {
                        log.debug("Looked ahead process time partition {} is not found", processTimePartition);
                        TreeSet<UpdatePeriod> newset = new TreeSet<UpdatePeriod>();
                        newset.addAll(updatePeriods);
                        newset.remove(maxInterval);
                        log.debug("newset of update periods:{}", newset);
                        if (!newset.isEmpty()) {
                            // Get partitions for look ahead process time
                            log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                            Set<FactPartition> processTimeParts = getPartitions(
                                    TimeRange.builder().fromDate(pdt).toDate(nextPdt)
                                            .partitionColumn(processTimePartCol).build(),
                                    newset, true, failOnPartialData, missingPartitions);
                            log.debug("Look ahead partitions: {}", processTimeParts);
                            TimeRange timeRange = TimeRange.builder().fromDate(dt).toDate(nextDt).build();
                            for (FactPartition pPart : processTimeParts) {
                                log.debug("Looking for finer partitions in pPart: {}", pPart);
                                for (Date date : timeRange.iterable(pPart.getPeriod(), 1)) {
                                    FactPartition innerPart = new FactPartition(partCol, date,
                                            pPart.getPeriod(), pPart, partWhereClauseFormat);
                                    updatePartitionStorage(innerPart);
                                    innerPart.setFound(pPart.isFound());
                                    if (innerPart.isFound()) {
                                        partitions.add(innerPart);
                                    }
                                }
                                log.debug("added all sub partitions blindly in pPart: {}", pPart);
                            }
                        }
                    }
                }
            }
        } else {
            log.info("Partition:{} does not exist in any storage table", part);
            // Try to cover this single slot with finer update periods before declaring it missing.
            if (!getPartitions(dt, nextDt, partCol, partitions, remainingIntervals, false, failOnPartialData,
                    missingPartitions)) {
                log.debug("Adding non existing partition {}", part);
                if (addNonExistingParts) {
                    // Add non existing partitions for all cases of whether we populate all non existing or not.
                    this.participatingUpdatePeriods.add(maxInterval);
                    missingPartitions.add(part);
                    if (!failOnPartialData) {
                        partitions.add(part);
                        part.getStorageTables().add(storageTable);
                    }
                } else {
                    log.info("No finer granualar partitions exist for {}", part);
                    return false;
                }
            } else {
                log.debug("Finer granualar partitions added for {}", part);
            }
        }
    }

    // Cover the unaligned head and tail of the range with the finer update periods.
    return getPartitions(fromDate, ceilFromDate, partCol, partitions, remainingIntervals, addNonExistingParts,
            failOnPartialData, missingPartitions)
            && getPartitions(floorToDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                    failOnPartialData, missingPartitions);
}

From source file:net.spfbl.core.Core.java

/**
 * Joins the elements of the given set into a single string, separating
 * consecutive tokens with the given delimiter. Tokens appear in the set's
 * natural (sorted) iteration order.
 *
 * @param set the sorted set of tokens to join; may be null
 * @param demiliter the separator placed between consecutive tokens; may be null
 * @return the joined string, or null when the set is null or empty, or the
 *         delimiter is null
 */
public static String getSequence(TreeSet<String> set, String demiliter) {
    if (set == null || demiliter == null || set.isEmpty()) {
        return null;
    }
    // String.join iterates the set in order, identical to the manual loop it replaces.
    return String.join(demiliter, set);
}

From source file:org.gvsig.framework.web.service.impl.OGCInfoServiceImpl.java

/**
 * Recursive method to add list layers get by the WMSServer into tree list
 *
 * @param children Represents child layers of parentNode
 * @param tree Tree of layers/* www. j a v  a 2  s.  c  o  m*/
 * @param crs CRS that must have the layers to add these to the tree
 * @param parentNode Represents parent layer
 * @param layersMap Represents the map that contains the layers obtained
 * @param isCalledByWizard Indicate if the method is called by the wizard
 */
private void generateWMSChildrenNodes(ArrayList<WMSLayer> children, List<TreeNode> tree,
        TreeSet<String> listCrs, TreeNode parentNode,
        Map<String, org.gvsig.framework.web.ogc.WMSLayer> layersMap, WMSInfo wmsInfo) {
    for (WMSLayer layerChild : children) {
        // get crs (srs) (belong to layer)
        Vector crsVector = layerChild.getAllSrs();
        // Only get the layers with have crs parameter or if crs is null
        if (listCrs.isEmpty() || !Collections.disjoint(crsVector, listCrs)) {

            ArrayList<WMSLayer> layerChildChildren = layerChild.getChildren();
            TreeNode layerChildNode = new TreeNode(layerChild.getName());
            layerChildNode.setTitle(layerChild.getTitle());

            // Get the children and their information
            if (layerChildChildren.isEmpty()) {
                layerChildNode.setFolder(false);

                // Add layer to layer map
                org.gvsig.framework.web.ogc.WMSLayer wmsLayer = new org.gvsig.framework.web.ogc.WMSLayer();
                TreeSet<String> crsSet = new TreeSet<String>();
                crsSet.addAll(layerChild.getAllSrs());
                wmsLayer.setCrs(crsSet);
                List<WMSStyle> wmsStyles = createListWMSStyles(layerChild.getStyles());
                wmsLayer.setStyles(wmsStyles);
                wmsLayer.setTitle(layerChild.getTitle());
                wmsLayer.setName(layerChild.getName());
                layersMap.put(layerChild.getName(), wmsLayer);

                // add to wmsinfo the layers supported by this layer
                TreeSet<String> crsSupported = wmsInfo.getCrsSupported();
                crsSupported.addAll(layerChild.getAllSrs());
                wmsInfo.setCrsSupported(crsSupported);

                //create one child for each crs of the layer
                if (listCrs.isEmpty() || listCrs.size() > 1) {
                    for (String crs : crsSet) {
                        if (StringUtils.isNotEmpty(crs) && (listCrs.isEmpty() || listCrs.contains(crs))) {
                            TreeNode crsNode = new TreeNode(crs);
                            crsNode.setHideCheckbox(true);
                            crsNode.setUnselectable(true);
                            crsNode.setIconclass(" ");
                            layerChildNode.addChild(crsNode);
                        }
                    }
                }
            } else {
                layerChildNode.setFolder(true);
                layerChildNode.setExpanded(true);
                generateWMSChildrenNodes(layerChildChildren, tree, listCrs, layerChildNode, layersMap, wmsInfo);
            }
            parentNode.addChild(layerChildNode);
        }
    }

}

From source file:org.zanata.service.impl.GlossaryFileServiceImpl.java

/**
 * Copies the given glossary entry (and a filtered subset of its terms) into
 * its persistent counterpart and saves it.
 *
 * @param from source entry to transfer
 * @param transLocaleId when present and onlyTransferTransTerm is set, only terms
 *        in this locale are transferred
 * @param onlyTransferTransTerm when true, source-language terms are skipped
 * @return the persisted entry
 */
private HGlossaryEntry transferGlossaryEntryAndSave(GlossaryEntry from, Optional<LocaleId> transLocaleId,
        boolean onlyTransferTransTerm) {
    HGlossaryEntry to = getOrCreateGlossaryEntry(from, getContentHash(from));
    to.setSourceRef(from.getSourceReference());
    to.setPos(from.getPos());
    to.setDescription(from.getDescription());

    // Resolve the qualified name, defaulting to the global one.
    String qualifiedName = GlossaryUtil.GLOBAL_QUALIFIED_NAME;
    if (from.getQualifiedName() != null && StringUtils.isNotBlank(from.getQualifiedName().getName())) {
        qualifiedName = from.getQualifiedName().getName();
    }
    Glossary glossary = glossaryDAO.getGlossaryByQualifiedName(qualifiedName);
    if (glossary == null) {
        // First entry under this qualified name: create and persist the glossary now.
        glossary = new Glossary(qualifiedName);
        glossaryDAO.persistGlossary(glossary);
        executeCommit();
    }
    to.setGlossary(glossary);

    // Locales that are not enabled on the server; collected for a single warning.
    TreeSet<String> skippedLocales = Sets.newTreeSet();
    List<GlossaryTerm> filteredTerms = from.getGlossaryTerms().stream().filter(term -> {
        if (term == null || term.getLocale() == null) {
            return false;
        }
        if (onlyTransferTransTerm) {
            // Skip terms in the source language.
            if (term.getLocale().equals(from.getSrcLang())) {
                return false;
            }
            // When a target locale is given, skip terms in any other locale.
            if (transLocaleId.isPresent() && !term.getLocale().equals(transLocaleId.get())) {
                return false;
            }
        }
        return true;
    }).collect(Collectors.toList());
    for (GlossaryTerm term : filteredTerms) {
        HLocale termHLocale = localeServiceImpl.getByLocaleId(term.getLocale());
        if (termHLocale == null) {
            skippedLocales.add(term.getLocale().toString());
            continue;
        }
        // check if there's existing term
        HGlossaryTerm hGlossaryTerm = getOrCreateGlossaryTerm(to, termHLocale, term);
        hGlossaryTerm.setComment(term.getComment());
        hGlossaryTerm.setLastModifiedBy(authenticatedAccount.getPerson());
        to.getGlossaryTerms().put(termHLocale, hGlossaryTerm);
    }
    if (!skippedLocales.isEmpty()) {
        log.warn("Language {} is not enabled in Zanata. Term in the language will be ignored.",
                StringUtils.join(skippedLocales, ","));
    }
    glossaryDAO.makePersistent(to);
    return to;
}

From source file:com.chingo247.structureapi.commands.StructureCommands.java

/**
 * Builds a colorized, multi-line textual summary of a structure: id/name,
 * world, origin coordinates, dimensions, status, optional price, the master
 * owners and (when present) the linked WorldGuard region.
 *
 * @param structure the structure to describe
 * @param colors provider of the chat color codes used to decorate the output
 * @return the formatted info string
 */
private static String getInfo(StructureNode structure, IColors colors) {
    // Master owners in alphabetical order.
    TreeSet<String> owners = Sets.newTreeSet(ALPHABETICAL_ORDER);
    List<SettlerNode> mastersNode = structure.getOwnerDomain().getOwners(OwnerType.MASTER);
    for (SettlerNode master : mastersNode) {
        owners.add(master.getName());
    }

    // Comma-separated, colorized owner list. StringBuilder replaces the original
    // String += accumulation (which was O(n^2) in total characters).
    StringBuilder ownership = new StringBuilder();
    int remaining = owners.size();
    for (String owner : owners) {
        ownership.append(colors.yellow()).append(owner).append(colors.reset());
        if (--remaining > 0) {
            ownership.append(", ");
        }
    }
    String ownershipString = ownership.toString();

    StringBuilder line = new StringBuilder();
    line.append("#").append(colors.gold()).append(structure.getId()).append(" ")
            .append(colors.blue()).append(structure.getName()).append("\n")
            .append(colors.reset()).append("World: ").append(colors.yellow())
            .append(structure.getWorldName()).append("\n");

    Vector position = structure.getOrigin();
    line.append(colors.reset()).append("Location: ").append(colors.yellow()).append("X: ")
            .append(colors.reset()).append(position.getX()).append(" ")
            .append(colors.yellow()).append("Y: ").append(colors.reset()).append(position.getY())
            .append(" ").append(colors.yellow()).append("Z: ")
            .append(colors.reset()).append(position.getZ()).append("\n");

    CuboidRegion region = structure.getCuboidRegion();
    line.append(colors.reset()).append("Width: ").append(colors.yellow()).append(region.getWidth())
            .append(colors.reset()).append(" Height: ").append(colors.yellow()).append(region.getHeight())
            .append(colors.reset()).append(" Length: ").append(colors.yellow()).append(region.getLength())
            .append(colors.reset()).append("\n");

    line.append(colors.reset()).append("Status: ").append(colors.reset())
            .append(getStatusString(structure, colors)).append("\n");

    if (structure.getPrice() > 0) {
        line.append(colors.reset()).append("Value: ").append(colors.yellow())
                .append(structure.getPrice()).append("\n");
    }

    if (!owners.isEmpty()) {
        // Single owner stays on the same line; multiple owners wrap to the next line.
        if (owners.size() == 1) {
            line.append(colors.reset()).append("Owners(MASTER): ").append(ownershipString).append("\n");
        } else {
            line.append(colors.reset()).append("Owners(MASTER): \n").append(ownershipString).append("\n");
        }
    }

    if (structure.getNode().hasProperty("WGRegion")) {
        line.append(colors.reset()).append("WorldGuard-Region: ").append(colors.yellow())
                .append(structure.getNode().getProperty("WGRegion"));
    }
    return line.toString();

}

From source file:pku.sei.checkedcoverage.slicing.Slicer.java

/**
 * Selects, per class, the last (highest-numbered) location that was not
 * checked, creates a new slice criterion for it, and removes the lines
 * covered by that slice from the unchecked set — repeating until every
 * location has been sliced.
 *
 * @return for every class name, the set of line numbers a check was created for
 */
public static HashMap<String, TreeSet<Long>> sliceForUnchecked() {
    System.out.println("Trying to add checks");
    HashMap<String, TreeSet<Long>> sliceCreated = new HashMap<String, TreeSet<Long>>();
    HashMap<String, HashSet<Instruction>> uncheckedMap = getUncheckedMap();
    int crisNr = 0;
    // entrySet() avoids the keySet()+get() double lookup of the original.
    for (java.util.Map.Entry<String, HashSet<Instruction>> entry : uncheckedMap.entrySet()) {
        String key = entry.getKey();
        sliceCreated.put(key, new TreeSet<Long>());
        // Unchecked line numbers plus, per line, criterion info and touched variables.
        TreeSet<Integer> unchecked = new TreeSet<Integer>();
        HashMap<Integer, String> critInfoMap = new HashMap<Integer, String>();
        // NOTE(review): varsMap is populated but never read in this method —
        // kept because removing it would also skip getLocalVariables() lookups; confirm before deleting.
        HashMap<Integer, HashSet<String>> varsMap = new HashMap<Integer, HashSet<String>>();
        for (Instruction inst : entry.getValue()) {
            // Only field writes and local-variable instructions are sliceable locations.
            if (inst.getType().equals(InstructionType.FIELD) || inst.getType().equals(InstructionType.VAR)) {
                unchecked.add(inst.getLineNumber());
                if (!critInfoMap.containsKey(inst.getLineNumber())) {
                    critInfoMap.put(inst.getLineNumber(),
                            inst.getMethod().getReadClass().getName() + "." + inst.getMethod().getName());
                }
                if (!varsMap.containsKey(inst.getLineNumber())) {
                    varsMap.put(inst.getLineNumber(), new HashSet<String>());
                }
                if (inst.getType().equals(InstructionType.FIELD)) {
                    FieldInstruction fieldinst = (FieldInstruction) inst;
                    varsMap.get(inst.getLineNumber()).add(fieldinst.getFieldName());
                } else if (inst.getType().equals(InstructionType.VAR)) {
                    VarInstruction varinst = (VarInstruction) inst;
                    // Only store opcodes (and RET) reference a named local variable.
                    if (varinst.getOpcode() == Opcodes.DSTORE || varinst.getOpcode() == Opcodes.ASTORE
                            || varinst.getOpcode() == Opcodes.LSTORE || varinst.getOpcode() == Opcodes.ISTORE
                            || varinst.getOpcode() == Opcodes.FSTORE || varinst.getOpcode() == Opcodes.RET) {
                        String varname = inst.getMethod().getLocalVariables()[varinst.getLocalVarIndex()]
                                .getName();
                        varsMap.get(inst.getLineNumber()).add(varname);
                    }
                }
            }
        }
        // Repeatedly slice from the last unchecked line until none remain.
        while (!unchecked.isEmpty()) {
            int last = unchecked.last();
            String cri = critInfoMap.get(last) + ":" + last + ":*";
            System.out.println(++crisNr + " new check(s) added!" + cri);
            unchecked.removeAll(sliceUnchecked(cri));
            sliceCreated.get(key).add((long) last);
            unchecked.remove(last);
        }
    }
    System.out.println("Done!");
    return sliceCreated;
}

From source file:org.sleuthkit.autopsy.keywordsearch.AccountsText.java

/**
 * Initialize this object with information about which pages/chunks have
 * hits. Multiple calls will not change the initial results.
 */
synchronized private void loadPageInfo() {
    // Already initialized; results are intentionally never recomputed.
    if (isPageInfoLoaded) {
        return;
    }
    if (chunkId != null) {//if a chunk is specified, only show that chunk/page
        this.numberPagesForFile = 1;
        this.currentPage = chunkId;
        this.numberOfHitsPerPage.put(chunkId, 0);
        this.pages.add(chunkId);
        this.currentHitPerPage.put(chunkId, 0);
    } else {
        try {
            this.numberPagesForFile = solrServer.queryNumFileChunks(this.solrObjectId);
        } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
            // Without a page count we cannot build paging info; leave isPageInfoLoaded false.
            LOGGER.log(Level.WARNING, "Could not get number pages for content " + this.solrDocumentId, ex); //NON-NLS
            return;
        }

        //if has chunks, get pages with hits
        TreeSet<Integer> sortedPagesWithHits = new TreeSet<>();
        SolrQuery q = new SolrQuery();
        q.setShowDebugInfo(DEBUG); //debug
        q.setQuery(queryString);
        q.setFields(Server.Schema.ID.toString()); //for this case we only need the document ids
        // Restrict the query to chunks of this object: "<objectId><separator>*".
        q.addFilterQuery(
                Server.Schema.ID.toString() + ":" + this.solrObjectId + Server.CHUNK_ID_SEPARATOR + "*");

        try {
            QueryResponse response = solrServer.query(q, METHOD.POST);
            for (SolrDocument resultDoc : response.getResults()) {
                final String resultDocumentId = resultDoc.getFieldValue(Server.Schema.ID.toString()).toString();
                // Put the solr chunk id in the map
                String resultChunkID = StringUtils.substringAfter(resultDocumentId, Server.CHUNK_ID_SEPARATOR);
                if (StringUtils.isNotBlank(resultChunkID)) {
                    sortedPagesWithHits.add(Integer.parseInt(resultChunkID));
                } else {
                    // Document id without a chunk suffix represents page 0.
                    sortedPagesWithHits.add(0);
                }
            }

        } catch (KeywordSearchModuleException | NoOpenCoreException | NumberFormatException ex) {
            // Best effort: fall through with whatever pages were collected so far.
            LOGGER.log(Level.WARNING, "Error executing Solr highlighting query: " + keywords, ex); //NON-NLS
        }

        //set page to first page having highlights
        if (sortedPagesWithHits.isEmpty()) {
            this.currentPage = 0;
        } else {
            this.currentPage = sortedPagesWithHits.first();
        }

        for (Integer page : sortedPagesWithHits) {
            numberOfHitsPerPage.put(page, 0); //unknown number of matches in the page
            pages.add(page);
            currentHitPerPage.put(page, 0); //set current hit to 0th
        }
    }

    isPageInfoLoaded = true;
}

From source file:org.opensolaris.opengrok.web.PageConfig.java

/**
 * Same as {@link #getRequestedProjects()}, but with a variable cookie name
 * and parameter name. This way it is trivial to implement a project filter.
 *
 * Resolution order: request parameter values, then cookie values, then the
 * configured default project — the first source that yields at least one
 * allowed project wins.
 *
 * @param paramName the name of the request parameter, which possibly
 * contains the project list in question.
 * @param cookieName name of the cookie which possibly contains project
 * lists used as fallback
 * @return a possibly empty set but never {@code null}.
 */
protected SortedSet<String> getRequestedProjects(String paramName, String cookieName) {
    TreeSet<String> set = new TreeSet<>();
    List<Project> projects = getEnv().getProjects();
    if (projects == null) {
        return set;
    }
    // Single-project installation: no selection needed, just the access check.
    if (projects.size() == 1 && authFramework.isAllowed(req, projects.get(0))) {
        set.add(projects.get(0).getDescription());
        return set;
    }
    addAllowedProjects(set, getParamVals(paramName));
    if (set.isEmpty()) {
        // Fall back to projects remembered in the cookie.
        addAllowedProjects(set, getCookieVals(cookieName));
    }
    if (set.isEmpty()) {
        // Last resort: the configured default project, if the user may see it.
        Project defaultProject = env.getDefaultProject();
        if (defaultProject != null && authFramework.isAllowed(req, defaultProject)) {
            set.add(defaultProject.getDescription());
        }
    }
    return set;
}

/**
 * Adds to {@code set} each description that resolves to an existing project
 * the current request is authorized to access.
 */
private void addAllowedProjects(SortedSet<String> set, List<String> descriptions) {
    for (String s : descriptions) {
        Project x = Project.getByDescription(s);
        if (x != null && authFramework.isAllowed(req, x)) {
            set.add(s);
        }
    }
}

From source file:org.unitime.timetable.action.PersonalizedExamReportAction.java

/**
 * Returns a value used to order classes by when they meet: the date of the
 * earliest committed meeting (epoch milliseconds) when the class has an
 * event with meetings, otherwise the committed assignment's start slot,
 * otherwise -1.
 *
 * NOTE(review): meeting time (millis) and start slot are different units;
 * presumably values are only compared within the same category — confirm.
 *
 * @param clazz the class to derive the ordering value for
 * @return meeting-date millis, start slot, or -1 when neither is available
 */
protected long getMeetingComparable(Class_ clazz) {
    Assignment assignment = clazz.getCommittedAssignment();
    // Typed TreeSet replaces the raw one; the original cast to Meeting proves the element type.
    TreeSet<Meeting> meetings = (clazz.getEvent() == null ? null
            : new TreeSet<Meeting>(clazz.getEvent().getMeetings()));
    if (meetings != null && !meetings.isEmpty()) {
        // Earliest meeting in the sorted set.
        return meetings.first().getMeetingDate().getTime();
    }
    if (assignment != null) {
        return assignment.getTimeLocation().getStartSlot();
    }
    return -1;
}