Example usage for java.util TreeSet iterator

Introduction

On this page you can find example usages of java.util.TreeSet.iterator().

Prototype

public Iterator<E> iterator() 

Document

Returns an iterator over the elements in this set in ascending order.
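
A minimal, self-contained sketch of that behavior (class name and values are invented for illustration):

import java.util.Iterator;
import java.util.TreeSet;

public class TreeSetIteratorDemo {
    public static void main(String[] args) {
        TreeSet<Integer> set = new TreeSet<>();
        set.add(30);
        set.add(10);
        set.add(20);

        // iterator() walks the elements in ascending order: 10, 20, 30
        Iterator<Integer> it = set.iterator();
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}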

Usage

From source file:org.starnub.starnubserver.pluggable.PluggableManager.java

private TreeSet<UnloadedPluggable> getFiles(String directoryString, String... extensions) {
    File directoryFile = new File(directoryString);
    Collection<File> fileCollection = FileUtils.listFiles(directoryFile, extensions, false);
    ArrayList<File> files = new ArrayList<>(fileCollection);
    TreeSet<UnloadedPluggable> unloadedPluggables = new TreeSet<>();
    for (File file : files) {
        try {
            UnloadedPluggable futureUnloadedPluggable = new UnloadedPluggable(file);
            if (futureUnloadedPluggable.getDetails() != null) {
                String futureUnloadedPluggableName = futureUnloadedPluggable.getDetails().getNAME();
                double futureUnloadedPluggableVersion = futureUnloadedPluggable.getDetails().getVERSION();
                Iterator<UnloadedPluggable> iterator = unloadedPluggables.iterator();
                boolean removed = false;
                boolean exist = false;
                while (iterator.hasNext()) {
                    UnloadedPluggable alreadyUnloadedPluggable = iterator.next();
                    String alreadyUnloadedPluggableName = alreadyUnloadedPluggable.getDetails().getNAME();
                    if (futureUnloadedPluggableName.equalsIgnoreCase(alreadyUnloadedPluggableName)) {
                        exist = true;
                        double versionTest = alreadyUnloadedPluggable.getDetails().getVERSION();
                        if (futureUnloadedPluggableVersion > versionTest) {
                            iterator.remove();
                            removed = true;
                        }
                    }
                }
                if (removed || !exist) {
                    unloadedPluggables.add(futureUnloadedPluggable);
                }
            }
        } catch (DirectoryCreationFailed | MissingData | IOException e) {
            e.printStackTrace();
        }
    }
    for (UnloadedPluggable unloadedPluggable : unloadedPluggables) {
        new StarNubEvent("Unloaded_Pluggable_File_Loaded", unloadedPluggable);
    }
    return unloadedPluggables;
}
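
The example above relies on Iterator.remove(), which is the only safe way to drop elements from a TreeSet while it is being traversed; calling remove on the set itself mid-iteration would throw ConcurrentModificationException. A minimal sketch of that pattern (names and filter condition are invented):

import java.util.Iterator;
import java.util.TreeSet;

public class SafeRemovalDemo {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        // Iterator.remove() deletes the element last returned by next();
        // names.remove(...) inside this loop would fail fast instead.
        Iterator<String> it = names.iterator();
        while (it.hasNext()) {
            if (it.next().startsWith("b")) {
                it.remove();
            }
        }
        System.out.println(names); // [alpha, gamma]
    }
}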

From source file:com.inmobi.conduit.local.LocalStreamService.java

public void createListing(FileSystem fs, FileStatus fileStatus, Map<FileStatus, String> results,
        Set<FileStatus> trashSet, Table<String, String, String> checkpointPaths) throws IOException {
    List<FileStatus> streamsFileStatus = new ArrayList<FileStatus>();
    FileSystem srcFs = FileSystem.get(srcCluster.getHadoopConf());
    for (String stream : streamsToProcess) {
        streamsFileStatus.add(srcFs.getFileStatus(new Path(srcCluster.getDataDir(), stream)));
    }
    for (FileStatus stream : streamsFileStatus) {
        String streamName = stream.getPath().getName();
        LOG.debug("createListing working on Stream [" + streamName + "]");
        FileStatus[] collectors;
        try {
            collectors = fs.listStatus(stream.getPath());
        } catch (FileNotFoundException ex) {
            collectors = new FileStatus[0];
        }
        long minOfLatestCollectorTimeStamp = -1;
        for (FileStatus collector : collectors) {
            TreeMap<String, FileStatus> collectorPaths = new TreeMap<String, FileStatus>();
            // check point for this collector
            String collectorName = collector.getPath().getName();
            String checkPointKey = getCheckPointKey(this.getClass().getSimpleName(), streamName, collectorName);

            String checkPointValue = null;
            byte[] value = checkpointProvider.read(checkPointKey);
            if (value == null) {
                // In case checkpointKey with newer name format is absent,read old
                // checkpoint key
                String oldCheckPointKey = streamName + collectorName;
                value = checkpointProvider.read(oldCheckPointKey);
            }
            if (value != null)
                checkPointValue = new String(value);
            LOG.debug("CheckPoint Key [" + checkPointKey + "] value [ " + checkPointValue + "]");
            FileStatus[] files = null;
            try {
                files = fs.listStatus(collector.getPath(), new CollectorPathFilter());
            } catch (FileNotFoundException e) {
                // ignored: files stays null and the collector is skipped below
            }

            if (files == null) {
                LOG.warn("No Files Found in the Collector " + collector.getPath() + " Skipping Directory");
                continue;
            }
            TreeSet<FileStatus> sortedFiles = new TreeSet<FileStatus>(new FileTimeStampComparator());
            String currentFile = getCurrentFile(fs, files, sortedFiles);
            LOG.debug("last file " + currentFile + " in the collector directory " + collector.getPath());

            Iterator<FileStatus> it = sortedFiles.iterator();
            numberOfFilesProcessed = 0;
            long latestCollectorFileTimeStamp = -1;
            while (it.hasNext() && numberOfFilesProcessed < filesPerCollector) {
                FileStatus file = it.next();
                LOG.debug("Processing " + file.getPath());
                /*
                 * fileTimeStamp value will be -1 for the files which are already processed
                 */
                long fileTimeStamp = processFile(file, currentFile, checkPointValue, fs, results,
                        collectorPaths, streamName);
                if (fileTimeStamp > latestCollectorFileTimeStamp) {
                    latestCollectorFileTimeStamp = fileTimeStamp;
                }
            }
            populateTrash(collectorPaths, trashSet);
            populateCheckpointPathForCollector(checkpointPaths, collectorPaths);

            if ((latestCollectorFileTimeStamp < minOfLatestCollectorTimeStamp
                    || minOfLatestCollectorTimeStamp == -1) && latestCollectorFileTimeStamp != -1) {
                minOfLatestCollectorTimeStamp = latestCollectorFileTimeStamp;
            }
        } // all files in a collector
        if (minOfLatestCollectorTimeStamp != -1) {
            lastProcessedFile.put(streamName, minOfLatestCollectorTimeStamp);
        } else {
            LOG.warn("No new files in " + streamName + " stream");
        }
    }
}

From source file:org.hyperic.hq.ui.action.portlet.metricviewer.ViewAction.java

public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {

    HttpSession session = request.getSession();
    WebUser user = SessionUtils.getWebUser(session);
    DashboardConfig dashConfig = dashboardManager
            .findDashboard((Integer) session.getAttribute(Constants.SELECTED_DASHBOARD_ID), user, authzBoss);

    if (dashConfig == null) {
        return new ActionRedirect("/Dashboard.do");
    }

    ConfigResponse dashPrefs = dashConfig.getConfig();

    int sessionId = user.getSessionId().intValue();
    long ts = System.currentTimeMillis();
    String token;
    try {
        token = RequestUtils.getStringParameter(request, "token");
    } catch (ParameterNotFoundException e) {
        token = null;
    }

    // For multi-portlet configuration
    String numKey = PropertiesForm.NUM_TO_SHOW;
    String resKey = PropertiesForm.RESOURCES;
    String resTypeKey = PropertiesForm.RES_TYPE;
    String metricKey = PropertiesForm.METRIC;
    String descendingKey = PropertiesForm.DECSENDING;
    String titleKey = PropertiesForm.TITLE;

    if (token != null) {
        numKey += token;
        resKey += token;
        resTypeKey += token;
        metricKey += token;
        descendingKey += token;
        titleKey += token;
    }

    JSONObject res = new JSONObject();
    if (token != null) {
        res.put("token", token);
    } else {
        res.put("token", JSONObject.NULL);
    }

    res.put("title", dashPrefs.getValue(titleKey, ""));

    // Load resources
    List<AppdefEntityID> entityIds = DashboardUtils.preferencesAsEntityIds(resKey, dashPrefs);
    AppdefEntityID[] arrayIds = entityIds.toArray(new AppdefEntityID[entityIds.size()]);
    int count = Integer.parseInt(dashPrefs.getValue(numKey, "10"));
    String metric = dashPrefs.getValue(metricKey, "");
    boolean isDescending = Boolean.valueOf(dashPrefs.getValue(descendingKey, "true")).booleanValue();

    // Validate
    if (arrayIds.length == 0 || count == 0 || metric.length() == 0) {
        res.put("metricValues", new JSONObject());
        response.getWriter().write(res.toString());
        return null;
    }

    Integer[] tids = new Integer[] { new Integer(metric) };
    List<MeasurementTemplate> metricTemplates = measurementBoss.findMeasurementTemplates(sessionId, tids,
            PageControl.PAGE_ALL);
    MeasurementTemplate template = (MeasurementTemplate) metricTemplates.get(0);

    String resource = dashPrefs.getValue(resTypeKey);
    AppdefEntityTypeID typeId = new AppdefEntityTypeID(resource);
    AppdefResourceTypeValue typeVal = appdefBoss.findResourceTypeById(sessionId, typeId);
    CacheData[] data = new CacheData[arrayIds.length];
    List<Integer> measurements = new ArrayList<Integer>(arrayIds.length);
    long interval = 0;
    ArrayList<String> toRemove = new ArrayList<String>();
    for (int i = 0; i < arrayIds.length; i++) {
        AppdefEntityID id = arrayIds[i];
        try {
            data[i] = loadData(sessionId, id, template);
        } catch (AppdefEntityNotFoundException e) {
            toRemove.add(id.getAppdefKey());
        }
        if (data[i] != null && data[i].getMeasurement() != null) {
            measurements.add(i, data[i].getMeasurement().getId());
            if (data[i].getMeasurement().getInterval() > interval) {
                interval = data[i].getMeasurement().getInterval();
            }
        } else {
            measurements.add(i, null);
        }
    }

    MetricValue[] vals = measurementBoss.getLastMetricValue(sessionId, measurements, interval);
    TreeSet<MetricSummary> sortedSet = new TreeSet<MetricSummary>(new MetricSummaryComparator(isDescending));
    for (int i = 0; i < data.length; i++) {
        // Only show resources with data
        if (vals[i] != null) {
            MetricSummary summary = new MetricSummary(data[i].getResource(), template, vals[i]);
            sortedSet.add(summary);
        }
    }

    JSONObject metricValues = new JSONObject();
    metricValues.put("resourceTypeName", typeVal.getName());
    metricValues.put("metricName", template.getName());
    ArrayList<JSONObject> values = new ArrayList<JSONObject>();
    for (Iterator<MetricSummary> i = sortedSet.iterator(); i.hasNext() && count-- > 0;) {
        MetricSummary s = i.next();
        JSONObject val = new JSONObject();
        val.put("value", s.getFormattedValue());
        val.put("resourceId", s.getAppdefResourceValue().getId());
        val.put("resourceTypeId", s.getAppdefResourceValue().getEntityId().getType());
        val.put("resourceName", StringEscapeUtils.escapeHtml(s.getAppdefResourceValue().getName()));
        values.add(val);
    }
    metricValues.put("values", values);
    res.put("metricValues", metricValues);

    response.getWriter().write(res.toString());

    log.debug("Metric viewer loaded in " + (System.currentTimeMillis() - ts) + " ms.");

    if (toRemove.size() > 0) {
        log.debug("Removing " + toRemove.size() + " missing resources.");
        DashboardUtils.removeResources((String[]) toRemove.toArray(new String[toRemove.size()]), resKey,
                dashPrefs);
    }

    return null;
}
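
Note the count-limited loop above: combining it.hasNext() with a decrementing counter takes at most N elements from the head of the sorted set. A minimal sketch of the same idiom (comparator and values are invented):

import java.util.Iterator;
import java.util.TreeSet;

public class TopNDemo {
    public static void main(String[] args) {
        // Descending order, like the isDescending case in the portlet above.
        TreeSet<Integer> scores = new TreeSet<>((a, b) -> b - a);
        scores.add(70);
        scores.add(95);
        scores.add(80);

        // Take at most `count` elements from the head of the sorted set.
        int count = 2;
        for (Iterator<Integer> it = scores.iterator(); it.hasNext() && count-- > 0;) {
            System.out.println(it.next()); // prints 95, then 80
        }
    }
}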

From source file:org.apache.hadoop.chukwa.inputtools.mdl.TorqueInfoProcessor.java

private void getHodJobInfo() throws IOException {
    StringBuffer sb = new StringBuffer();
    sb.append(torqueBinDir).append("/qstat -a");

    String[] getQueueInfoCommand = new String[3];
    getQueueInfoCommand[0] = "ssh";
    getQueueInfoCommand[1] = torqueServer;
    getQueueInfoCommand[2] = sb.toString();

    String command = getQueueInfoCommand[0] + " " + getQueueInfoCommand[1] + " " + getQueueInfoCommand[2];
    ProcessBuilder pb = new ProcessBuilder(getQueueInfoCommand);

    Process p = pb.start();

    Timer timeout = new Timer();
    TorqueTimerTask torqueTimerTask = new TorqueTimerTask(p, command);
    timeout.schedule(torqueTimerTask, TorqueTimerTask.timeoutInterval * 1000);

    BufferedReader result = new BufferedReader(new InputStreamReader(p.getInputStream()));
    ErStreamHandler errorHandler = new ErStreamHandler(p.getErrorStream(), command, true);
    errorHandler.start();

    String line = null;
    boolean start = false;
    TreeSet<String> jobsInTorque = new TreeSet<String>();
    while ((line = result.readLine()) != null) {
        if (line.startsWith("---")) {
            start = true;
            continue;
        }

        if (start) {
            String[] items = line.split("\\s+");
            if (items.length >= 10) {
                String hodIdLong = items[0];
                String hodId = hodIdLong.split("[.]")[0];
                String userId = items[1];
                String numOfMachine = items[5];
                String status = items[9];
                jobsInTorque.add(hodId);
                if (!currentHodJobs.containsKey(hodId)) {
                    TreeMap<String, String> aJobData = new TreeMap<String, String>();

                    aJobData.put("userId", userId);
                    aJobData.put("numOfMachine", numOfMachine);
                    aJobData.put("traceCheckCount", "0");
                    aJobData.put("process", "0");
                    aJobData.put("status", status);
                    currentHodJobs.put(hodId, aJobData);
                } else {
                    TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
                    aJobData.put("status", status);
                    currentHodJobs.put(hodId, aJobData);
                } // if..else
            }
        }
    } // while

    try {
        errorHandler.join();
    } catch (InterruptedException ie) {
        log.error(ie.getMessage());
    }
    timeout.cancel();

    Set<String> currentHodJobIds = currentHodJobs.keySet();
    Iterator<String> currentHodJobIdsIt = currentHodJobIds.iterator();
    TreeSet<String> finishedHodIds = new TreeSet<String>();
    while (currentHodJobIdsIt.hasNext()) {
        String hodId = currentHodJobIdsIt.next();
        if (!jobsInTorque.contains(hodId)) {
            TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
            String process = aJobData.get("process");
            if (process.equals("0") || process.equals("1")) {
                aJobData.put("status", "C");
            } else {
                finishedHodIds.add(hodId);
            }
        }
    } // while

    Iterator<String> finishedHodIdsIt = finishedHodIds.iterator();
    while (finishedHodIdsIt.hasNext()) {
        String hodId = finishedHodIdsIt.next();
        currentHodJobs.remove(hodId);
    }

}
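
Rather than removing entries from currentHodJobs while iterating its key set (which would break the iteration), the example collects the ids into a separate TreeSet and deletes them in a second pass. A minimal sketch of that deferred-removal pattern (map contents are invented):

import java.util.Iterator;
import java.util.TreeMap;
import java.util.TreeSet;

public class DeferredRemovalDemo {
    public static void main(String[] args) {
        TreeMap<String, String> jobs = new TreeMap<>();
        jobs.put("job1", "R");
        jobs.put("job2", "C");
        jobs.put("job3", "C");

        // First pass: collect keys to drop; mutating jobs here would
        // invalidate the key-set iteration.
        TreeSet<String> finished = new TreeSet<>();
        for (String id : jobs.keySet()) {
            if ("C".equals(jobs.get(id))) {
                finished.add(id);
            }
        }

        // Second pass: iterate the separate set, so removal is safe.
        Iterator<String> it = finished.iterator();
        while (it.hasNext()) {
            jobs.remove(it.next());
        }
        System.out.println(jobs); // {job1=R}
    }
}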

From source file:org.pdfsam.console.business.pdf.handlers.SplitCmdExecutor.java

/**
 * Execute the split of a pdf document when split type is S_BLEVEL
 *
 * @param inputCommand
 * @param bookmarksTable
 *            bookmarks table. It's populated only when splitting by bookmarks. If null or empty it's ignored
 * @throws Exception
 */
private void executeSplit(SplitParsedCommand inputCommand, Hashtable bookmarksTable) throws Exception {
    pdfReader = PdfUtility.readerFor(inputCommand.getInputFile());
    pdfReader.removeUnusedObjects();
    pdfReader.consolidateNamedDestinations();

    int n = pdfReader.getNumberOfPages();
    BookmarksProcessor bookmarkProcessor = new BookmarksProcessor(SimpleBookmark.getBookmark(pdfReader), n);
    int fileNum = 0;
    LOG.info("Found " + n + " pages in input pdf document.");

    Integer[] limits = inputCommand.getSplitPageNumbers();
    // limits list validation and cleanup
    TreeSet limitsList = validateSplitLimits(limits, n);
    if (limitsList.isEmpty()) {
        throw new SplitException(SplitException.ERR_NO_PAGE_LIMITS);
    }

    // at this point the limits list is guaranteed to have values; bookmark processing can start
    int currentPage;
    Document currentDocument = new Document(pdfReader.getPageSizeWithRotation(1));
    int relativeCurrentPage = 0;
    int endPage = n;
    int startPage = 1;
    PdfImportedPage importedPage;
    File tmpFile = null;
    File outFile = null;

    Iterator itr = limitsList.iterator();
    if (itr.hasNext()) {
        endPage = ((Integer) itr.next()).intValue();
    }
    for (currentPage = 1; currentPage <= n; currentPage++) {
        relativeCurrentPage++;
        // check whether to read one more page or to open a new document
        if (relativeCurrentPage == 1) {
            LOG.debug("Creating a new document.");
            fileNum++;
            tmpFile = FileUtility.generateTmpFile(inputCommand.getOutputFile());
            String bookmark = null;
            if (bookmarksTable != null && bookmarksTable.size() > 0) {
                bookmark = (String) bookmarksTable.get(new Integer(currentPage));
            }
            FileNameRequest request = new FileNameRequest(currentPage, fileNum, bookmark);
            outFile = new File(inputCommand.getOutputFile(), prefixParser.generateFileName(request));
            startPage = currentPage;
            currentDocument = new Document(pdfReader.getPageSizeWithRotation(currentPage));

            pdfWriter = new PdfSmartCopy(currentDocument, new FileOutputStream(tmpFile));

            // set creator
            currentDocument.addCreator(ConsoleServicesFacade.CREATOR);

            setCompressionSettingOnWriter(inputCommand, pdfWriter);
            setPdfVersionSettingOnWriter(inputCommand, pdfWriter, Character.valueOf(pdfReader.getPdfVersion()));

            currentDocument.open();
        }

        importedPage = pdfWriter.getImportedPage(pdfReader, currentPage);
        pdfWriter.addPage(importedPage);

        // if it's time to close the document
        if (currentPage == endPage) {
            LOG.info("Temporary document " + tmpFile.getName() + " done, now adding bookmarks...");
            // manage bookmarks
            List bookmarks = bookmarkProcessor.processBookmarks(startPage, endPage);
            if (bookmarks != null) {
                pdfWriter.setOutlines(bookmarks);
            }
            relativeCurrentPage = 0;
            currentDocument.close();
            FileUtility.renameTemporaryFile(tmpFile, outFile, inputCommand.isOverwrite());
            LOG.debug("File " + outFile.getCanonicalPath() + " created.");
            endPage = (itr.hasNext()) ? ((Integer) itr.next()).intValue() : n;
        }
        setPercentageOfWorkDone((currentPage * WorkDoneDataModel.MAX_PERGENTAGE) / n);
    }
    pdfReader.close();
    LOG.info("Split " + inputCommand.getSplitType() + " done.");
}

From source file:com.taobao.common.tfs.impl.LocalKey.java

public int getSegmentForWrite(List<SegmentData> segmentDataList, long offset, byte[] data, int start,
        int length) {
    long currentOffset = offset;
    int currentStart = start;
    int remainLength = length, writtenLength = 0, needWriteLength = 0, remainNwLength = 0, // remain_need_write_length
            totalLength = 0;
    List<SegmentInfo> gcSegmentList = new ArrayList<SegmentInfo>();
    SegmentInfo segmentInfo = new SegmentInfo();
    segmentDataList.clear();

    while (segmentDataList.size() < ClientConfig.BATCH_COUNT && remainLength > 0) {
        writtenLength = needWriteLength = 0;

        remainNwLength = Math.min(
                (ClientConfig.BATCH_COUNT - segmentDataList.size()) * ClientConfig.SEGMENT_LENGTH,
                remainLength);

        segmentInfo.setOffset(currentOffset);
        TreeSet<SegmentInfo> tailInfoSet = (TreeSet<SegmentInfo>) segmentInfoSet.tailSet(segmentInfo);

        if (tailInfoSet.size() == 0) {
            needWriteLength = remainNwLength;
            checkOverlap(segmentInfo, gcSegmentList);
        } else {
            SegmentInfo firstInfo = tailInfoSet.first();
            if (firstInfo.getOffset() != currentOffset) {
                needWriteLength = (int) (firstInfo.getOffset() - currentOffset);
                checkOverlap(segmentInfo, gcSegmentList);
            }
            if (needWriteLength > remainNwLength) {
                needWriteLength = remainNwLength;
            } else {
                Iterator it = tailInfoSet.iterator();
                remainNwLength -= needWriteLength;
                while (remainNwLength > 0 && it.hasNext()) {
                    SegmentInfo curInfo = (SegmentInfo) it.next();
                    int tmpCrc = 0, curLength = curInfo.getLength();
                    if (remainNwLength < curLength) {
                        log.info("segment length conflict: " + curLength + " <> " + remainNwLength);
                        needWriteLength += remainNwLength;
                        remainNwLength = 0;
                    } else if ((tmpCrc = TfsUtil.crc32(0, data, currentStart + needWriteLength,
                            curLength)) != curInfo.getCrc()) {
                        log.info("segment crc conflict: " + curInfo.getCrc() + " <> " + tmpCrc);
                        needWriteLength += curLength;
                        remainNwLength -= curLength;
                    } else { // full segment crc is correct use it
                        log.debug("segment data written: " + curInfo);
                        writtenLength += curLength;
                        remainNwLength = 0;
                        break;
                    }
                    gcSegmentList.add(curInfo);
                }
                if (!it.hasNext()) {
                    needWriteLength += remainNwLength;
                }
            }
        }

        getSegment(segmentDataList, currentOffset, data, currentStart, needWriteLength);
        totalLength = needWriteLength + writtenLength;
        remainLength -= totalLength;
        currentStart += totalLength;
        currentOffset += totalLength;
        gcSegment(gcSegmentList);
    }
    return (length - remainLength);
}
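
The tailSet(segmentInfo) call above returns a live view of the elements greater than or equal to the given element, and iterating that view walks only the matching suffix of the set, still in ascending order. A minimal sketch (values are invented):

import java.util.Iterator;
import java.util.TreeSet;

public class TailSetDemo {
    public static void main(String[] args) {
        TreeSet<Long> offsets = new TreeSet<>();
        offsets.add(0L);
        offsets.add(1024L);
        offsets.add(2048L);
        offsets.add(4096L);

        // tailSet(1024L) is a sorted view of all offsets >= 1024.
        Iterator<Long> it = offsets.tailSet(1024L).iterator();
        while (it.hasNext()) {
            System.out.println(it.next()); // 1024, 2048, 4096
        }
    }
}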

From source file:com.alfaariss.oa.profile.aselect.processor.handler.AbstractAPIHandler.java

/**
 * Verifies signatures for requests and the signed parameters are supplied as map.
 * @param sSignature the signature that must be verified
 * @param sKeyAlias the key alias
 * @param mapRequest the data that is signed
 * @return TRUE if the signature is valid
 * @throws ASelectException if verification failed
 */
protected boolean verifySignature(String sSignature, String sKeyAlias, Map<String, String> mapRequest)
        throws ASelectException {
    StringBuffer sbSignatureData = new StringBuffer();
    TreeSet<String> sortedSet = new TreeSet<String>(mapRequest.keySet());
    for (Iterator<String> iter = sortedSet.iterator(); iter.hasNext();) {
        String sKey = iter.next();
        sbSignatureData.append(mapRequest.get(sKey));
    }

    return verifySignature(sSignature, sKeyAlias, sbSignatureData.toString());
}
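
Wrapping a map's key set in a TreeSet, as above, yields a deterministic sorted iteration order regardless of how the map stores its entries, which is what makes the concatenated signature data reproducible on both sides. A minimal sketch (keys and values are invented):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

public class SortedKeysDemo {
    public static void main(String[] args) {
        Map<String, String> request = new HashMap<>();
        request.put("uid", "42");
        request.put("app", "demo");
        request.put("ts", "1700000000");

        // Keys iterate as app, ts, uid no matter the HashMap's ordering,
        // so the concatenation is always "demo170000000042".
        StringBuilder data = new StringBuilder();
        for (String key : new TreeSet<>(request.keySet())) {
            data.append(request.get(key));
        }
        System.out.println(data);
    }
}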

From source file:net.sourceforge.fenixedu.domain.serviceRequests.documentRequests.DiplomaRequest.java

/**
 * The DocumentRequestCreator should never have created Past Diploma
 * Requests as DiplomaRequests. This method can be used for data migrations,
 * but should be removed once all past diploma requests are migrated.
 */
@Deprecated
private boolean isPastDiplomaRequestHack() {
    TreeSet<AcademicServiceRequestSituation> sortedSituations = new TreeSet<AcademicServiceRequestSituation>(
            AcademicServiceRequestSituation.COMPARATOR_BY_MOST_RECENT_SITUATION_DATE_AND_ID);
    sortedSituations.addAll(getAcademicServiceRequestSituationsSet());

    AcademicServiceRequestSituation deliveredSituation, concludedSituation, receivedSituation, sentSituation,
            processedSituation, newSituation;
    try {
        Iterator<AcademicServiceRequestSituation> situationsIterator = sortedSituations.iterator();
        deliveredSituation = situationsIterator.next();
        concludedSituation = situationsIterator.next();
        receivedSituation = situationsIterator.next();
        sentSituation = situationsIterator.next();
        processedSituation = situationsIterator.next();
        newSituation = situationsIterator.next();
    } catch (NoSuchElementException ex) {
        return false;
    }

    if (!deliveredSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.DELIVERED)) {
        return false;
    }
    if (!deliveredSituation.getJustification().equals("-")) {
        return false;
    }
    if (!(deliveredSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(deliveredSituation.getSituationDate().minuteOfHour().get() == 5)) {
        return false;
    }

    // #####################################################

    if (!concludedSituation.getCreator().equals(deliveredSituation.getCreator())) {
        return false;
    }
    if (!concludedSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.CONCLUDED)) {
        return false;
    }
    if (!concludedSituation.getJustification().equals("-")) {
        return false;
    }
    if (!(concludedSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(concludedSituation.getSituationDate().minuteOfHour().get() == 4)) {
        return false;
    }

    // #####################################################

    if (!receivedSituation.getCreator().equals(deliveredSituation.getCreator())) {
        return false;
    }
    if (!receivedSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.RECEIVED_FROM_EXTERNAL_ENTITY)) {
        return false;
    }
    if (!receivedSituation.getJustification().equals("-")) {
        return false;
    }
    if (!(receivedSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(receivedSituation.getSituationDate().minuteOfHour().get() == 3)) {
        return false;
    }

    // #####################################################

    if (!sentSituation.getCreator().equals(deliveredSituation.getCreator())) {
        return false;
    }
    if (!sentSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.SENT_TO_EXTERNAL_ENTITY)) {
        return false;
    }
    if (!sentSituation.getJustification().equals("-")) {
        return false;
    }
    if (!(sentSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(sentSituation.getSituationDate().minuteOfHour().get() == 2)) {
        return false;
    }
    if (!sentSituation.getSituationDate().toLocalDate()
            .equals(receivedSituation.getSituationDate().toLocalDate())) {
        return false;
    }

    // #####################################################

    if (!processedSituation.getCreator().equals(deliveredSituation.getCreator())) {
        return false;
    }
    if (!processedSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.PROCESSING)) {
        return false;
    }
    if (!processedSituation.getJustification().equals("-")) {
        return false;
    }
    if (!(processedSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(processedSituation.getSituationDate().minuteOfHour().get() == 1)) {
        return false;
    }
    if (!processedSituation.getSituationDate().toLocalDate()
            .equals(receivedSituation.getSituationDate().toLocalDate())) {
        return false;
    }

    // #####################################################

    if (!newSituation.getCreator().equals(deliveredSituation.getCreator())) {
        return false;
    }
    if (!newSituation.getAcademicServiceRequestSituationType()
            .equals(AcademicServiceRequestSituationType.NEW)) {
        return false;
    }
    if (!StringUtils.isEmpty(newSituation.getJustification())) {
        return false;
    }
    if (!(newSituation.getSituationDate().hourOfDay().get() == 0)) {
        return false;
    }
    if (!(newSituation.getSituationDate().minuteOfHour().get() == 0)) {
        return false;
    }
    if (!newSituation.getSituationDate().toLocalDate()
            .equals(receivedSituation.getSituationDate().toLocalDate())) {
        return false;
    }

    return true;
}

From source file:org.jboss.dashboard.factory.ComponentsTree.java

public void destroy() {
    // Stop components in reverse order of which they have been created.
    TreeSet sortedSet = new TreeSet(new Comparator() {
        public int compare(Object o1, Object o2) {
            Component c1 = (Component) o1;
            Component c2 = (Component) o2;
            return -new Long(c1.getCreationOrderNumber()).compareTo(new Long(c2.getCreationOrderNumber()));
        }
    });

    for (Iterator it = mappings.keySet().iterator(); it.hasNext();) {
        String key = (String) it.next();
        Component comp = (Component) mappings.get(key);
        if (Component.SCOPE_GLOBAL.equals(comp.getScope())) {
            Object object = null;
            try {
                object = comp.getTheInstance();
            } catch (LookupException e) {
                log.error("Error: ", e);
            }
            if (object != null && object instanceof FactoryLifecycle) {
                sortedSet.add(comp);
            }
        }
    }

    for (Iterator iterator = sortedSet.iterator(); iterator.hasNext();) {
        Component component = (Component) iterator.next();
        Object object = null;
        try {
            object = component.getTheInstance();
        } catch (LookupException e) {
            log.error("Error: ", e);
        }
        if (object != null && object instanceof FactoryLifecycle) {
            FactoryLifecycle ob = (FactoryLifecycle) object;
            try {
                ob.shutdown();
            } catch (Exception e) {
                log.error("Error shutting down " + component.getName() + ": ", e);
            }
        }
    }

}

From source file:geogebra.common.kernel.EquationSolver.java

/**
 * Calculates all roots of a polynomial given by eqn using Laguerre's method.
 * Polishes the roots found. The roots are stored in eqn again.
 */
@SuppressWarnings("deprecation")
private int laguerreAllComplex(double[] real, double[] complex) {

    Complex[] complexRoots = null;
    try {
        if (laguerreSolver == null) {
            laguerreSolver = new LaguerreSolver();
        }
        complexRoots = laguerreSolver.solveAll(real, LAGUERRE_START);
    } catch (Exception e) {
        App.debug("Problem solving with LaguerreSolver: " + e.getLocalizedMessage());
        return 0;
    }

    // sort by real part & remove duplicates

    TreeSet<Complex> sortedSet = new TreeSet<Complex>(getComparatorReal());

    for (int i = 0; i < complexRoots.length; i++) {
        sortedSet.add(complexRoots[i]);
    }

    int roots = 0;
    Complex temp;
    Iterator<Complex> iterator = sortedSet.iterator();
    while (iterator.hasNext()) {
        temp = iterator.next();
        real[roots] = temp.getReal();
        complex[roots] = temp.getImaginary();
        roots++;
    }

    return roots;
}
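
Finally, note that a TreeSet built with a custom Comparator, as in the last example, treats elements the comparator reports as equal as duplicates and keeps only one of them; that is what lets the code above deduplicate roots by real part. A minimal sketch (the rounding tolerance is invented):

import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeSet;

public class ComparatorDedupDemo {
    public static void main(String[] args) {
        // Compare doubles to six decimal places, so nearly identical
        // values collapse to a single entry.
        Comparator<Double> byRounded =
                Comparator.comparingLong(d -> Math.round(d * 1e6));

        TreeSet<Double> roots = new TreeSet<>(byRounded);
        roots.add(1.0000001);
        roots.add(1.0000002); // equal under the comparator, so dropped
        roots.add(2.5);

        Iterator<Double> it = roots.iterator();
        while (it.hasNext()) {
            System.out.println(it.next()); // 1.0000001, then 2.5
        }
    }
}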