Example usage for java.util TreeSet first

Introduction

On this page you can find usage examples for java.util.TreeSet#first(), drawn from real open-source projects.

Prototype

public E first() 
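
Returns the first (lowest) element currently in this set according to its ordering. If the set is empty, first() throws NoSuchElementException; pollFirst() returns null instead.

Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) illustrating that contract:

import java.util.TreeSet;

public class TreeSetFirstDemo {
    public static void main(String[] args) {
        TreeSet<Integer> numbers = new TreeSet<>();
        numbers.add(42);
        numbers.add(7);
        numbers.add(19);

        // first() peeks at the lowest element without removing it
        System.out.println(numbers.first()); // prints 7
        System.out.println(numbers.size());  // still 3

        // first() throws NoSuchElementException on an empty set,
        // so guard the call when the set may be empty
        TreeSet<Integer> empty = new TreeSet<>();
        System.out.println(empty.isEmpty() ? "(empty)" : empty.first().toString());
    }
}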

Usage

From source file:it.ozimov.springboot.templating.mail.service.PriorityQueueSchedulerService.java

private synchronized Optional<EmailSchedulingWrapper> dequeue() throws InterruptedException {
    EmailSchedulingWrapper emailSchedulingWrapper = null;
    timeOfNextScheduledMessage = null;
    while (consumer.enabled() && isNull(emailSchedulingWrapper)) {
        //try to find a message in queue
        final long now = TimeUtils.now();
        for (final TreeSet<EmailSchedulingWrapper> queue : queues) {
            if (!queue.isEmpty()) {
                final long time = queue.first().getScheduledDateTime().toInstant().toEpochMilli();
                if (time - now <= DELTA) {
                    //message found!
                    emailSchedulingWrapper = queue.pollFirst();
                    break;
                } else if (isNull(timeOfNextScheduledMessage) || time < timeOfNextScheduledMessage) {
                    timeOfNextScheduledMessage = time;
                }

            }
        }
        if (isNull(emailSchedulingWrapper)) {
            //no message was found, let's sleep, some message may arrive in the meanwhile
            if (isNull(timeOfNextScheduledMessage)) { //all the queues are empty
                wait(); //the consumer starts waiting for a new email to be scheduled
            } else {
                final long waitTime = timeOfNextScheduledMessage - TimeUtils.now() - DELTA;
                if (waitTime > 0) {
                    wait(waitTime); //wait before sending the most imminent scheduled email
                }
            }
        }
    }
    //here emailSchedulingWrapper is the message to send
    return Optional.ofNullable(emailSchedulingWrapper);
}
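
In this scheduler, first() serves as a non-destructive peek: each queue is ordered by scheduled time, so queue.first() is the most imminent email in that queue. Only when that email is due within DELTA milliseconds does pollFirst() actually remove it; otherwise its timestamp merely shortens how long the consumer waits.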

From source file:org.apache.lens.driver.jdbc.DruidSQLRewriter.java

/**
 * Analyze internal.
 *
 * @throws SemanticException the semantic exception
 */
public void analyzeInternal(Configuration conf, HiveConf hconf) throws SemanticException {
    CubeSemanticAnalyzer c1 = new CubeSemanticAnalyzer(conf, hconf);

    QB qb = new QB(null, null, false);

    if (!c1.doPhase1(ast, qb, c1.initPhase1Ctx(), null)) {
        return;
    }

    if (!qb.getSubqAliases().isEmpty()) {
        log.warn("Subqueries in from clause is not supported by {} Query : {}", this, this.query);
        throw new SemanticException(
                "Subqueries in from clause is not supported by " + this + " Query : " + this.query);
    }

    // Get clause name
    TreeSet<String> ks = new TreeSet<String>(qb.getParseInfo().getClauseNames());
    /* The clause name. */
    String clauseName = ks.first();

    if (qb.getParseInfo().getJoinExpr() != null) {
        log.warn("Join queries not supported by {} Query : {}", this, this.query);
        throw new SemanticException("Join queries not supported by " + this + " Query : " + this.query);
    }
    // Split query into trees
    if (qb.getParseInfo().getWhrForClause(clauseName) != null) {
        this.whereAST = qb.getParseInfo().getWhrForClause(clauseName);
    }

    if (qb.getParseInfo().getGroupByForClause(clauseName) != null) {
        this.groupByAST = qb.getParseInfo().getGroupByForClause(clauseName);
    }

    if (qb.getParseInfo().getSelForClause(clauseName) != null) {
        this.selectAST = qb.getParseInfo().getSelForClause(clauseName);
    }

    if (qb.getParseInfo().getHavingForClause(clauseName) != null) {
        this.havingAST = qb.getParseInfo().getHavingForClause(clauseName);
    }

    if (qb.getParseInfo().getOrderByForClause(clauseName) != null) {
        this.orderByAST = qb.getParseInfo().getOrderByForClause(clauseName);
    }

    this.fromAST = HQLParser.findNodeByPath(ast, TOK_FROM);

}
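
The clause names are copied into a TreeSet so that ks.first() picks a deterministic (lexicographically smallest) clause name, which then keys the lookups for the WHERE, GROUP BY, SELECT, HAVING, and ORDER BY subtrees.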

From source file:org.opensextant.solrtexttagger.AbstractTaggerTest.java

/** Asserts the sorted arrays are equals, with a helpful error message when not.
 * @param message
 * @param expecteds
 * @param actuals
 */
public void assertSortedArrayEquals(String message, Object[] expecteds, Object[] actuals) {
    AssertionError error = null;
    try {
        assertArrayEquals(null, expecteds, actuals);
    } catch (AssertionError e) {
        error = e;
    }
    if (error == null)
        return;
    TreeSet<Object> expectedRemaining = new TreeSet<>(Arrays.asList(expecteds));
    expectedRemaining.removeAll(Arrays.asList(actuals));
    if (!expectedRemaining.isEmpty())
        fail(message + ": didn't find expected " + expectedRemaining.first() + " (of "
                + expectedRemaining.size() + "); " + error);
    TreeSet<Object> actualsRemaining = new TreeSet<>(Arrays.asList(actuals));
    actualsRemaining.removeAll(Arrays.asList(expecteds));
    fail(message + ": didn't expect " + actualsRemaining.first() + " (of " + actualsRemaining.size() + "); "
            + error);
}
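
Keeping the leftover elements in sorted sets means expectedRemaining.first() and actualsRemaining.first() always report the same representative mismatch for a given failure, which makes the error message stable and easy to compare across runs.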

From source file:com.basho.riak.client.http.util.logging.ConcurrentLoggingTest.java

/**
 * Test method for
 * {@link com.basho.riak.client.http.util.logging.LogNoHttpResponseRetryHandler#retryMethod(org.apache.commons.httpclient.HttpMethod, java.io.IOException, int)}
 * .
 * 
 * @throws InterruptedException
 */
@Test
public void retry_concurrentLogAndDump() throws InterruptedException {
    // create a bunch of threads
    // each must log 10 statements and call flush
    // ALL the statements must be present BUT ONCE in
    // the mock delegate appender (order does not matter)
    final int numThreads = 10;
    final LogNoHttpResponseRetryHandler handler = new LogNoHttpResponseRetryHandler();
    ExecutorService es = Executors.newFixedThreadPool(numThreads);
    List<Callable<Void>> tasks = new ArrayList<Callable<Void>>(numThreads);

    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch dumpLatch = new CountDownLatch(10);

    for (int i = 0; i < numThreads; i++) {
        final int threadCounter = i;
        tasks.add(new Callable<Void>() {

            @Override
            public Void call() {
                Logger logger = Logger.getLogger("httpclient.wire");
                try {
                    startLatch.await();

                    for (int j = 0; j < 10; j++) {
                        logger.debug(String.format(MESSAGE, new Object[] { threadCounter, j }));
                    }

                    dumpLatch.countDown();
                    dumpLatch.await();

                    handler.retryMethod(new GetMethod(), new NoHttpResponseException(), 0);

                    return null;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    startLatch.countDown();
    es.invokeAll(tasks);

    verify(mockLogger, times(100)).callAppenders(logEventCaptor.capture());

    TreeSet<Integer> check = new TreeSet<Integer>();

    for (LoggingEvent le : logEventCaptor.getAllValues()) {
        // verify that each Thread:Iter combination is present for threads 0-9 and iterations 0-9
        int loc = Integer.parseInt(le.getMessage().toString());
        check.add(loc);
    }

    assertEquals(100, check.size());
    assertEquals(0, (int) check.first());
    assertEquals(99, (int) check.last());
}
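
The TreeSet does double duty here: adding each parsed value deduplicates the 100 log events, while check.first() and check.last() confirm that the sorted values span exactly 0 through 99.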

From source file:com.taobao.common.tfs.impl.LocalKey.java

public int getSegmentForRead(List<SegmentData> segmentDataList, long offset, byte[] data, int start,
        int length) {
    if (offset > segmentHead.getSegmentLength()) {
        log.error("read offset over file length: " + offset + " > " + segmentHead.getSegmentLength());
        return 0;
    }

    // To read, segment info SHOULD and MUST be adjacent and completed
    // but not check here ...
    SegmentInfo segmentInfo = new SegmentInfo();
    int checkLength = 0;
    int currentLength = 0;
    segmentDataList.clear();

    segmentInfo.setOffset(offset);

    TreeSet<SegmentInfo> tailInfoSet = (TreeSet<SegmentInfo>) segmentInfoSet.tailSet(segmentInfo);

    if (tailInfoSet.size() == 0 || tailInfoSet.first().getOffset() != offset) {
        TreeSet<SegmentInfo> headInfoSet = (TreeSet<SegmentInfo>) segmentInfoSet.headSet(segmentInfo);
        // should NEVER happen: queried offset less than least offset(0) in stored segment info
        if (headInfoSet.size() == 0) {
            log.error("can not find segment for offset: " + offset);
            return TfsConstant.EXIT_GENERAL_ERROR;
        }

        SegmentInfo endInfo = headInfoSet.last();
        // actually SHOULD always occur, cause adjacent and completed read segment info
        if (endInfo.getOffset() + endInfo.getLength() > offset) {
            checkLength = (int) Math.min(length, endInfo.getOffset() + endInfo.getLength() - offset);
            SegmentData segmentData = new SegmentData(endInfo);
            segmentData.setInnerOffset((int) (offset - endInfo.getOffset()));
            segmentData.setData(data, start, checkLength);

            segmentDataList.add(segmentData);
        }
    }

    // get following adjacent segment info
    Iterator<SegmentInfo> it = tailInfoSet.iterator();
    while (segmentDataList.size() < ClientConfig.BATCH_COUNT && checkLength < length && it.hasNext()) {
        segmentInfo = it.next();
        currentLength = Math.min(segmentInfo.getLength(), length - checkLength);

        SegmentData segmentData = new SegmentData(segmentInfo);
        segmentData.setData(data, start + checkLength, currentLength);

        segmentDataList.add(segmentData);
        checkLength += currentLength;
    }
    return checkLength;
}
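
tailSet(segmentInfo) yields the segments starting at or after the requested offset, so tailInfoSet.first().getOffset() != offset detects a read that begins inside the preceding segment; headSet(...).last() then recovers that segment before the loop walks the adjacent tail segments.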

From source file:edu.mbl.jif.imaging.mmtiff.FileSet.java

/**
 * Completes the current time point of an aborted acquisition with blank images, so that it can
 * be opened correctly by ImageJ/BioFormats.
 */
private void completeFrameWithBlankImages(int frame) throws JSONException, MMScriptException {

    int numFrames = MDUtils.getNumFrames(mpTiff_.summaryMetadata_);
    int numSlices = MDUtils.getNumSlices(mpTiff_.summaryMetadata_);
    int numChannels = MDUtils.getNumChannels(mpTiff_.summaryMetadata_);
    if (numFrames > frame + 1) {
        TreeSet<String> writtenImages = new TreeSet<String>();
        for (MultipageTiffWriter w : tiffWriters_) {
            writtenImages.addAll(w.getIndexMap().keySet());
            w.setAbortedNumFrames(frame + 1);
        }
        int positionIndex = MDUtils.getIndices(writtenImages.first())[3];
        if (mpTiff_.omeTiff_) {
            mpTiff_.omeMetadata_.setNumFrames(positionIndex, frame + 1);
        }
        TreeSet<String> lastFrameLabels = new TreeSet<String>();
        for (int c = 0; c < numChannels; c++) {
            for (int z = 0; z < numSlices; z++) {
                lastFrameLabels.add(MDUtils.generateLabel(c, z, frame, positionIndex));
            }
        }
        lastFrameLabels.removeAll(writtenImages);
        try {
            for (String label : lastFrameLabels) {
                tiffWriters_.getLast().writeBlankImage(label);
                if (mpTiff_.omeTiff_) {
                    JSONObject dummyTags = new JSONObject();
                    int channel = Integer.parseInt(label.split("_")[0]);
                    int slice = Integer.parseInt(label.split("_")[1]);
                    MDUtils.setChannelIndex(dummyTags, channel);
                    MDUtils.setFrameIndex(dummyTags, frame);
                    MDUtils.setSliceIndex(dummyTags, slice);
                    mpTiff_.omeMetadata_.addImageTagsToOME(dummyTags, ifdCount_, baseFilename_,
                            currentTiffFilename_);
                }
            }
        } catch (IOException ex) {
            ReportingUtils.logError("problem writing dummy image");
        }
    }
}
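
Since writtenImages is sorted, writtenImages.first() provides a stable representative label from which the position index is parsed; the expected labels for the aborted frame are then generated and the already-written ones subtracted, leaving exactly the blanks still to be written.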

From source file:com.fileanalyzer.util.LineStatisticCalculator.java

public FileStatistic getFileStatistic() {
    FileStatistic fileStatis = new FileStatistic();

    fileStatis.setLengthLine(new Long(line.length()));
    String strArr[] = line.split(regexp);
    TreeSet<Integer> maxWord = new TreeSet<>();
    TreeSet<Integer> minWord = new TreeSet<>();
    long sumWords = 0;
    for (int i = 0; i < strArr.length; ++i) {
        int strSize = strArr[i].length();
        sumWords += strSize;
        if (i > 0 && i < strArr.length - 1)
            maxWord.add(strSize);
        minWord.add(strSize);
    }
    fileStatis.setLine(HtmlUtils.htmlEscape(line));
    if (sumWords > 0) {
        // use floating-point division so the average is not truncated
        fileStatis.setAvgWord((double) sumWords / strArr.length);
        fileStatis.setMinWord(new Long(minWord.first()));
    }
    if (maxWord.size() > 0)
        fileStatis.setMaxWord(new Long(maxWord.last()));
    if (getIdFk() != null)
        fileStatis.setFileId(getIdFk());
    return fileStatis;
}
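
Because a TreeSet keeps its elements in ascending order, minWord.first() is the length of the shortest word and maxWord.last() the length of the longest (with maxWord only collecting the inner tokens, as in the loop above).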

From source file:net.sourceforge.fenixedu.applicationTier.Servico.student.AddStudentToFinalDegreeWorkStudentGroup.java

private static Registration findSomeRegistration(final String username) {
    User user = User.findByUsername(username);
    if (user != null) {
        final Person person = user.getPerson();
        if (person != null) {
            final Student student = person.getStudent();
            final TreeSet<Registration> registrations = new TreeSet<Registration>(
                    new Comparator<Registration>() {

                        @Override
                        public int compare(final Registration r1, final Registration r2) {
                            final DegreeType dt1 = r1.getDegreeType();
                            final DegreeType dt2 = r2.getDegreeType();
                            return dt2.compareTo(dt1); // reverse the natural order
                        }

                    }) {

                @Override
                public boolean add(final Registration r) {
                    final DegreeType degreeType = r.getDegreeType();
                    return isValidDegreeType(degreeType) && super.add(r);
                }

                private boolean isValidDegreeType(DegreeType degreeType) {
                    return degreeType == DegreeType.BOLONHA_MASTER_DEGREE
                            || degreeType == DegreeType.BOLONHA_INTEGRATED_MASTER_DEGREE
                            || degreeType == DegreeType.BOLONHA_DEGREE;
                }

            };
            registrations.addAll(student.getRegistrationsSet());
            return registrations.isEmpty() ? null : registrations.first();
        }
    }
    return null;
}
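
The set is built with a comparator that reverses the natural DegreeType order and an add() override that rejects invalid degree types, so registrations.first() is the valid registration with the highest-ranked degree type.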

From source file:com.fileanalyzer.util.LineStatisticCalculator.java

public StringBuilder getSqlInsertFileStatistic() {
    Map<String, Object> params = new HashMap<>();
    StringBuilder sql = new StringBuilder("INSERT INTO " + FileStatistic.FileStatisticKey.TABLE + " ");
    params.put(FileStatisticKey.LENGTHLINE, new Long(line.length()));
    String strArr[] = line.split(regexp);
    TreeSet<Integer> maxWord = new TreeSet<>();
    TreeSet<Integer> minWord = new TreeSet<>();
    long sumWords = 0;
    for (int i = 0; i < strArr.length; ++i) {
        int strSize = strArr[i].length();
        sumWords += strSize;
        if (i > 0 && i < strArr.length - 1)
            maxWord.add(strSize);
        minWord.add(strSize);
    }
    params.put(FileStatisticKey.LINE, HtmlUtils.htmlEscape(line));
    if (sumWords > 0) {
        // use floating-point division so the average is not truncated
        params.put(FileStatisticKey.AVGWORD, (double) sumWords / strArr.length);
        params.put(FileStatisticKey.MINWORD, new Long(minWord.first()));
    }
    if (maxWord.size() > 0)
        params.put(FileStatisticKey.MAXWORD, new Long(maxWord.last()));
    if (getIdFk() != null)
        params.put(FileStatisticKey.FILEID, getIdFk());
    genParamAndValues(sql, params);

    return sql;
}
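
This is the SQL-building counterpart of getFileStatistic() above: the same TreeSet bookkeeping feeds minWord.first() and maxWord.last() into the INSERT parameters instead of into a FileStatistic bean.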

From source file:org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction.java

/**
 * {@inheritDoc}
 */
@Override
public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext)
        throws AmbariException, InterruptedException {

    HostRoleCommand hostRoleCommand = getHostRoleCommand();
    long requestId = hostRoleCommand.getRequestId();
    long stageId = hostRoleCommand.getStageId();

    // use the host role command to get to the parent upgrade group
    UpgradeItemEntity upgradeItem = m_upgradeDAO.findUpgradeItemByRequestAndStage(requestId, stageId);
    UpgradeGroupEntity upgradeGroup = upgradeItem.getGroupEntity();

    // find all of the stages in this group
    long upgradeGroupId = upgradeGroup.getId();
    UpgradeGroupEntity upgradeGroupEntity = m_upgradeDAO.findUpgradeGroup(upgradeGroupId);
    List<UpgradeItemEntity> groupUpgradeItems = upgradeGroupEntity.getItems();
    TreeSet<Long> stageIds = new TreeSet<>();
    for (UpgradeItemEntity groupUpgradeItem : groupUpgradeItems) {
        stageIds.add(groupUpgradeItem.getStageId());
    }

    // for every stage, find all tasks that have been SKIPPED_FAILED - we use a
    // bit of trickery here: within any given request, the stage IDs are
    // always sequential. This allows us to make a simple query instead of some
    // overly complex IN or NESTED SELECT query
    long minStageId = stageIds.first();
    long maxStageId = stageIds.last();

    List<HostRoleCommandEntity> skippedTasks = m_hostRoleCommandDAO.findByStatusBetweenStages(
            hostRoleCommand.getRequestId(), HostRoleStatus.SKIPPED_FAILED, minStageId, maxStageId);

    if (skippedTasks.isEmpty()) {
        return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", "There were no skipped failures", null);
    }

    StringBuilder buffer = new StringBuilder("The following steps failed and were automatically skipped:\n");

    for (HostRoleCommandEntity skippedTask : skippedTasks) {
        try {
            ServiceComponentHostEventWrapper eventWrapper = new ServiceComponentHostEventWrapper(
                    skippedTask.getEvent());

            ServiceComponentHostEvent event = eventWrapper.getEvent();

            String hostName = skippedTask.getHostName();
            if (null != hostName) {
                Map<String, Object> failures = m_structuredFailures.get(hostName);
                if (null == failures) {
                    failures = new HashMap<>();
                    m_structuredFailures.put(hostName, failures);
                }

                failures.put("id", skippedTask.getTaskId());
                failures.put("exit_code", skippedTask.getExitcode());
                failures.put("output_log", skippedTask.getOutputLog());
                failures.put("error_log", skippedTask.getErrorLog());

                String stdOut = StringUtils.abbreviateMiddle(new String(skippedTask.getStdOut()),
                        MIDDLE_ELLIPSIZE_MARKER, 1000);

                String stderr = StringUtils.abbreviateMiddle(new String(skippedTask.getStdError()),
                        MIDDLE_ELLIPSIZE_MARKER, 1000);

                failures.put("stdout", stdOut);
                failures.put("stderr", stderr);
            }

            buffer.append(event.getServiceComponentName());
            if (null != event.getHostName()) {
                buffer.append(" on ");
                buffer.append(event.getHostName());
            }

            buffer.append(": ");
            buffer.append(skippedTask.getCommandDetail());
            buffer.append("\n");
        } catch (Exception exception) {
            LOG.warn("Unable to extract failure information for {}", skippedTask);
            buffer.append(": ");
            buffer.append(skippedTask);
        }
    }

    String structuredOutput = m_gson.toJson(m_structuredFailures);
    String standardOutput = MessageFormat.format(FAILURE_STD_OUT_TEMPLATE, skippedTasks.size());
    String standardError = buffer.toString();

    return createCommandReport(0, HostRoleStatus.HOLDING, structuredOutput, standardOutput, standardError);
}