Example usage for java.util.TreeSet.last()

List of usage examples for java.util.TreeSet.last()

Introduction

This page lists example usages of java.util.TreeSet.last(), which returns the highest (last) element currently in the set.

Prototype

public E last() 
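
As a quick illustration before the real-world usages below, here is a minimal, self-contained sketch (not taken from the source files listed on this page; the class name TreeSetLastDemo is made up for the example). last() returns the greatest element under the set's ordering and throws NoSuchElementException on an empty set:

import java.util.NoSuchElementException;
import java.util.TreeSet;

public class TreeSetLastDemo {
    public static void main(String[] args) {
        TreeSet<Integer> set = new TreeSet<Integer>();
        set.add(30);
        set.add(10);
        set.add(20);

        // last() returns the greatest element according to the set's ordering
        System.out.println(set.last()); // prints 30

        set.clear();
        try {
            set.last(); // throws NoSuchElementException when the set is empty
        } catch (NoSuchElementException e) {
            System.out.println("empty set: " + e);
        }
    }
}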

Usage

From source file:com.taobao.common.tfs.impl.LocalKey.java

private void checkOverlap(SegmentInfo segmentInfo, List<SegmentInfo> gcSegmentList) {
    TreeSet<SegmentInfo> headInfoSet = (TreeSet<SegmentInfo>) (segmentInfoSet.headSet(segmentInfo));
    if (headInfoSet.size() != 0) {
        SegmentInfo endInfo = headInfoSet.last();
        // overlap, gc
        if (endInfo.getOffset() + endInfo.getLength() > segmentInfo.getOffset()) {
            gcSegmentList.add(endInfo);
        }
    }
}

From source file:com.basho.riak.client.http.util.logging.ConcurrentLoggingTest.java

/**
 * Test method for
 * {@link com.basho.riak.client.http.util.logging.LogNoHttpResponseRetryHandler#retryMethod(org.apache.commons.httpclient.HttpMethod, java.io.IOException, int)}
 * .
 * 
 * @throws InterruptedException
 */
@Test
public void retry_concurrentLogAndDump() throws InterruptedException {
    // create a bunch of threads
    // each must log 10 statements and call flush
    // ALL the statements must be present BUT ONCE in
    // the mock delegate appender (order does not matter)
    final int numThreads = 10;
    final LogNoHttpResponseRetryHandler handler = new LogNoHttpResponseRetryHandler();
    ExecutorService es = Executors.newFixedThreadPool(numThreads);
    List<Callable<Void>> tasks = new ArrayList<Callable<Void>>(numThreads);

    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch dumpLatch = new CountDownLatch(10);

    for (int i = 0; i < numThreads; i++) {
        final int threadCounter = i;
        tasks.add(new Callable<Void>() {

            @Override
            public Void call() {
                Logger logger = Logger.getLogger("httpclient.wire");
                try {
                    startLatch.await();

                    for (int j = 0; j < 10; j++) {
                        logger.debug(String.format(MESSAGE, new Object[] { threadCounter, j }));
                    }

                    dumpLatch.countDown();
                    dumpLatch.await();

                    handler.retryMethod(new GetMethod(), new NoHttpResponseException(), 0);

                    return null;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    startLatch.countDown();
    es.invokeAll(tasks);

    verify(mockLogger, times(100)).callAppenders(logEventCaptor.capture());

    TreeSet<Integer> check = new TreeSet<Integer>();

    for (LoggingEvent le : logEventCaptor.getAllValues()) {
        // verify that each Thread:Iter combination is present, threads 0-9 and iterations 0-9
        int loc = Integer.parseInt(le.getMessage().toString());
        check.add(loc);
    }

    assertEquals(100, check.size());
    assertEquals(0, (int) check.first());
    assertEquals(99, (int) check.last());
}

From source file:com.inmobi.databus.local.LocalStreamService.java

protected String getCurrentFile(FileSystem fs, FileStatus[] files, long lastFileTimeout) {
    //Proposed Algo :-> Sort files based on timestamp
    //if ((currentTimeStamp - last file's timestamp) > 5min ||
    //     if there are no files)
    // then null (implying process this file as non-current file)
    // else
    // return last file as the current file
    class FileTimeStampComparator implements Comparator<FileStatus> {
        public int compare(FileStatus file1, FileStatus file2) {
            long file1Time = file1.getModificationTime();
            long file2Time = file2.getModificationTime();
            if (file1Time < file2Time)
                return -1;
            else
                return 1;
        }
    }

    if (files == null || files.length == 0)
        return null;
    TreeSet<FileStatus> sortedFiles = new TreeSet<FileStatus>(new FileTimeStampComparator());
    for (FileStatus file : files) {
        sortedFiles.add(file);
    }

    //get last file from set
    FileStatus lastFile = sortedFiles.last();

    long currentTime = System.currentTimeMillis();
    long lastFileTime = lastFile.getModificationTime();
    if (currentTime - lastFileTime >= lastFileTimeout) {
        return null;
    } else
        return lastFile.getPath().getName();
}

From source file:com.springsource.hq.plugin.tcserver.plugin.serverconfig.FileSettingsRepository.java

public void revertToPreviousConfiguration(ConfigResponse config) throws PluginException {
    TreeSet<String> backupSet = (TreeSet<String>) fileUtility
            .getLatestBackupDirectories(config.getValue("installpath"));
    if (!backupSet.isEmpty()) {
        try {
            fileUtility.revertToBackupFiles(config.getValue("installpath"), backupSet.last());
        } catch (FileUtilityException e) {
            throw new PluginException(e.getMessage());
        }
    } else {
        throw new PluginException("Unable to revert to backup files: No backup directories found in "
                + new File(config.getValue("installpath"), "backup").getAbsolutePath());
    }
}

From source file:com.fileanalyzer.util.LineStatisticCalculator.java

public FileStatistic getFileStatistic() {
    FileStatistic fileStatis = new FileStatistic();

    fileStatis.setLengthLine(new Long(line.length()));
    String strArr[] = line.split(regexp);
    TreeSet<Integer> maxWord = new TreeSet<Integer>();
    TreeSet<Integer> minWord = new TreeSet<Integer>();
    long sumWords = 0;
    for (int i = 0; i < strArr.length; ++i) {
        int strSize = strArr[i].length();
        sumWords += strSize;
        if (i > 0 && i < strArr.length - 1)
            maxWord.add(strSize);
        minWord.add(strSize);
    }
    fileStatis.setLine(HtmlUtils.htmlEscape(line));
    if (sumWords > 0) {
        fileStatis.setAvgWord(new Double(sumWords / strArr.length));
        fileStatis.setMinWord(new Long(minWord.first()));
    }
    if (maxWord.size() > 0)
        fileStatis.setMaxWord(new Long(maxWord.last()));
    if (getIdFk() != null)
        fileStatis.setFileId(getIdFk());
    return fileStatis;
}

From source file:com.taobao.common.tfs.impl.LocalKey.java

public int getSegmentForRead(List<SegmentData> segmentDataList, long offset, byte[] data, int start,
        int length) {
    if (offset > segmentHead.getSegmentLength()) {
        log.error("read offset over file length: " + offset + " > " + segmentHead.getSegmentLength());
        return 0;
    }

    // To read, segment info SHOULD and MUST be adjacent and completed
    // but not check here ...
    SegmentInfo segmentInfo = new SegmentInfo();
    int checkLength = 0;
    int currentLength = 0;
    segmentDataList.clear();

    segmentInfo.setOffset(offset);

    TreeSet<SegmentInfo> tailInfoSet = (TreeSet<SegmentInfo>) segmentInfoSet.tailSet(segmentInfo);

    if (tailInfoSet.size() == 0 || tailInfoSet.first().getOffset() != offset) {
        TreeSet<SegmentInfo> headInfoSet = (TreeSet<SegmentInfo>) segmentInfoSet.headSet(segmentInfo);
        // should NEVER happen: queried offset less than least offset(0) in stored segment info
        if (headInfoSet.size() == 0) {
            log.error("can not find segment for offset: " + offset);
            return TfsConstant.EXIT_GENERAL_ERROR;
        }

        SegmentInfo endInfo = headInfoSet.last();
        // actually SHOULD always occur, cause adjacent and completed read segment info
        if (endInfo.getOffset() + endInfo.getLength() > offset) {
            checkLength = (int) Math.min(length, endInfo.getOffset() + endInfo.getLength() - offset);
            SegmentData segmentData = new SegmentData(endInfo);
            segmentData.setInnerOffset((int) (offset - endInfo.getOffset()));
            segmentData.setData(data, start, checkLength);

            segmentDataList.add(segmentData);
        }
    }

    // get following adjacent segment info
    Iterator<SegmentInfo> it = tailInfoSet.iterator();
    while (segmentDataList.size() < ClientConfig.BATCH_COUNT && checkLength < length && it.hasNext()) {
        segmentInfo = it.next();
        currentLength = Math.min(segmentInfo.getLength(), length - checkLength);

        SegmentData segmentData = new SegmentData(segmentInfo);
        segmentData.setData(data, start + checkLength, currentLength);

        segmentDataList.add(segmentData);
        checkLength += currentLength;
    }
    return checkLength;
}

From source file:com.fileanalyzer.util.LineStatisticCalculator.java

public StringBuilder getSqlInsertFileStatistic() {
    Map<String, Object> params = new HashMap<>();
    StringBuilder sql = new StringBuilder("INSERT INTO " + FileStatistic.FileStatisticKey.TABLE + " ");
    params.put(FileStatisticKey.LENGTHLINE, new Long(line.length()));
    String strArr[] = line.split(regexp);
    TreeSet<Integer> maxWord = new TreeSet<Integer>();
    TreeSet<Integer> minWord = new TreeSet<Integer>();
    long sumWords = 0;
    for (int i = 0; i < strArr.length; ++i) {
        int strSize = strArr[i].length();
        sumWords += strSize;
        if (i > 0 && i < strArr.length - 1)
            maxWord.add(strSize);
        minWord.add(strSize);
    }
    params.put(FileStatisticKey.LINE, HtmlUtils.htmlEscape(line));
    if (sumWords > 0) {
        params.put(FileStatisticKey.AVGWORD, new Double(sumWords / strArr.length));
        params.put(FileStatisticKey.MINWORD, new Long(minWord.first()));
    }
    if (maxWord.size() > 0)
        params.put(FileStatisticKey.MAXWORD, new Long(maxWord.last()));
    if (getIdFk() != null)
        params.put(FileStatisticKey.FILEID, getIdFk());
    genParamAndValues(sql, params);

    return sql;
}

From source file:org.obm.push.contacts.ContactsBackendTest.java

@Test
public void sortedByDefaultFolderName() {
    final String defaultFolderName = DEFAULT_PARENT_BOOK_NAME;

    Folder f1 = Folder.builder().name("users").uid(-1).ownerLoginAtDomain(user.getLoginAtDomain()).build();
    Folder f2 = Folder.builder().name("collected_contacts").uid(2).ownerLoginAtDomain(user.getLoginAtDomain())
            .build();
    Folder f3 = Folder.builder().name(defaultFolderName).uid(3).ownerLoginAtDomain(user.getLoginAtDomain())
            .build();
    Folder f4 = Folder.builder().name("my address book").uid(4).ownerLoginAtDomain(user.getLoginAtDomain())
            .build();

    TreeSet<Folder> treeset = new TreeSet<Folder>(new ComparatorUsingFolderName(defaultFolderName));
    treeset.addAll(ImmutableList.of(f1, f2, f3, f4));

    assertThat(treeset).hasSize(4);
    assertThat(treeset).contains(f1, f2, f3, f4);
    assertThat(treeset.first().getName()).isEqualTo(defaultFolderName);
    assertThat(treeset.last().getName()).isEqualTo("users");
}

From source file:com.thoughtworks.go.server.dao.PipelineSqlMapDao.java

private boolean isCurrentLatestInactive(TreeSet<Long> ids) {
    return !loadHistory(ids.last()).isAnyStageActive();
}

From source file:com.thoughtworks.go.server.dao.PipelineSqlMapDao.java

private boolean isNewerThanCurrentLatest(Stage stage, TreeSet<Long> ids) {
    return stage.getPipelineId() > ids.last();
}