Example usage for java.util SortedSet size

List of usage examples for java.util SortedSet size

Introduction

On this page you can find example usages of the java.util.SortedSet.size() method.

Prototype

int size();

Source Link

Document

Returns the number of elements in this set (its cardinality).

Usage

From source file:org.commoncrawl.util.ArcFileWriter.java

/**
 * append a pre-generated arcfile entry directly into the arc file writer
 * /*w w w . jav a2s  .c  o  m*/
 * @param arcFileData
 *          - the compressed arc file entry
 * @param dataBufferLength
 *          - the entry length
 * @throws IOException
 */
public void writeRawArcFileItem(String contentType, byte[] arcFileData, int dataBufferLength)
        throws IOException {
    // check to see if we need to start a new underlying file
    checkSize(0, dataBufferLength);
    // update stats
    getActiveFile()._totalContentBytesWritten += dataBufferLength;
    getActiveFile()._itemsWritten++;
    SortedSet<Integer> counts = _mimeTypeCounts.get(contentType);
    if (counts.size() == 0) {
        counts.add(1);
    } else {
        int count = counts.first() + 1;
        counts.clear();
        counts.add(count);
    }
    // record start position of this item
    _lastItemPos = getActiveFile().getFileSize();
    // write out data
    _out.write(arcFileData, 0, dataBufferLength);
    // record size of last item
    _lastItemCompressedSize = (getActiveFile().getFileSize() - _lastItemPos);
    // update stats
    getActiveFile()._compressedBytesWritten += _lastItemCompressedSize;
}

From source file:org.commoncrawl.util.ArcFileWriter.java

/**
 * Performs bookkeeping that must happen before a record is written: rolls the
 * underlying file if needed, updates byte/item statistics and the per-mime-type
 * counter, records the item start position, and wraps the output stream in a
 * new GZIP member for the upcoming record.
 *
 * @param headerBytesLength  length of the record header about to be written
 * @param contentBytesLength length of the record content about to be written
 * @param contentType        MIME type of the record, used for per-type counts
 * @throws IOException if rolling the file or wrapping the stream fails
 */
private void preWriteRecordTasks(int headerBytesLength, int contentBytesLength, String contentType)
        throws IOException {

    checkSize(headerBytesLength, contentBytesLength);

    // update stats
    getActiveFile()._totalHeaderBytesWritten += headerBytesLength;
    getActiveFile()._totalContentBytesWritten += contentBytesLength;
    getActiveFile()._itemsWritten++;
    // Increment the per-mime-type count; the sorted set holds a single running total.
    // NOTE(review): assumes _mimeTypeCounts is pre-populated for every contentType —
    // this would NPE if the key is missing; confirm against the map's initialization.
    SortedSet<Integer> counts = _mimeTypeCounts.get(contentType);
    if (counts.isEmpty()) {
        counts.add(1);
    } else {
        int count = counts.first() + 1;
        counts.clear();
        counts.add(count);
    }

    // record start position of this item
    _lastItemPos = getActiveFile().getFileSize();

    // Wrap stream in GZIP Writer.
    // The below construction immediately writes the GZIP 'default'
    // header out on the underlying stream.
    _out = new CompressedStream(_out);
}

From source file:org.jahia.utils.maven.plugin.contentgenerator.wise.FileAndFolderService.java

/**
 * Generates file entries for a Wise instance. File names are taken either
 * sequentially (when at least as many files are requested as names exist) or
 * randomly without replacement; each file gets random user roles, a random
 * tag, a mixin derived from its extension, a detected MIME type, and text
 * content extracted with Tika.
 *
 * @param nbFilesToGenerate number of FileBO entries to produce
 * @param currentNodePath   JCR path under which the files are created
 * @param fileNames         pool of available physical file names
 * @param nbUsers           number of generated users (may be null or 0, in
 *                          which case "root" is used for all roles)
 * @param filesDirectory    directory containing the physical files
 * @param tags              tags to choose from (must be non-empty)
 * @param wiseInstanceName  name of the target Wise instance
 * @return a sorted set of generated FileBO instances
 */
public Set<FileBO> generateFiles(Integer nbFilesToGenerate, String currentNodePath, List<String> fileNames,
        Integer nbUsers, File filesDirectory, List<TagBO> tags, String wiseInstanceName) {
    SortedSet<FileBO> files = new TreeSet<FileBO>();

    List<String> fileNamesAvailable = new ArrayList<String>(fileNames);

    Integer nbAvailableFiles = fileNames.size();
    int currentFilenameIndex = 0;

    String imageExtensions[] = { ".png", ".gif", ".jpeg", ".jpg" };
    String officeDocumentExtensions[] = { ".doc", ".xls", ".ppt", ".docx", ".xlsx", ".pptx" };

    String creator = "root";
    String owner = "root";
    String editor = "root";
    String reader = "root";
    int idCreator;
    int idOwner;
    int idEditor;
    int idReader;
    int nbOfTags = tags.size();
    int randFilenameIndex;
    String extractedContent = "";
    FileBO newFile = null;

    // NOTE(review): files is a TreeSet, so entries that compare equal do not grow
    // the set; if duplicates can be generated this loop may not terminate —
    // confirm FileBO's ordering distinguishes generated entries.
    while (files.size() < nbFilesToGenerate) {
        String fileName = "";
        if (nbFilesToGenerate.compareTo(nbAvailableFiles) >= 0) {
            // Sequential pass over the name pool.
            // NOTE(review): if nbFilesToGenerate > nbAvailableFiles this index runs
            // past the end of fileNames — confirm callers never request more files
            // than there are names.
            fileName = fileNames.get(currentFilenameIndex);
            currentFilenameIndex++;
        } else {
            // Random pick without replacement.
            // FIX: was rand.nextInt(size - 1), which threw for a single remaining
            // name and could never select the last index (nextInt(n) is 0..n-1).
            randFilenameIndex = rand.nextInt(fileNamesAvailable.size());
            fileName = fileNamesAvailable.get(randFilenameIndex);
            fileNamesAvailable.remove(randFilenameIndex);
        }

        String mixin = "";

        if (nbUsers != null && (nbUsers.compareTo(0) > 0)) {
            // FIX: was rand.nextInt(nbUsers - 1), which threw for nbUsers == 1 and
            // excluded the highest user id (nextInt(n) already returns 0..n-1).
            idCreator = rand.nextInt(nbUsers);
            creator = "user" + idCreator;

            idOwner = rand.nextInt(nbUsers);
            owner = "user" + idOwner;

            idEditor = rand.nextInt(nbUsers);
            editor = "user" + idEditor;

            idReader = rand.nextInt(nbUsers);
            reader = "user" + idReader;
        }

        // Choose correct mixin depending on the file extension
        String fileExtension = getFileExtension(fileName);
        if (Arrays.asList(imageExtensions).contains(fileExtension)) {
            mixin = " jmix:image";
        } else if (Arrays.asList(officeDocumentExtensions).contains(fileExtension)) {
            mixin = " jmix:document";
        }

        // Detect MIME type
        File f = new File(filesDirectory + sep + fileName);
        String mimeType = getMimeType(f);

        // Extract file content
        Metadata metadata = new Metadata();
        if (mimeType != null) {
            metadata.set(Metadata.CONTENT_TYPE, mimeType);
        }

        try {
            extractedContent = new Tika().parseToString(f);
        } catch (FileNotFoundException e) {
            logger.error("File not found during text extraction " + f.getAbsoluteFile(), e);
        } catch (IOException e) {
            // FIX: was a bare printStackTrace; log with context instead.
            logger.error("I/O error during text extraction of " + f.getAbsoluteFile(), e);
        } catch (TikaException e) {
            // FIX: was a bare printStackTrace; log with context instead.
            logger.error("Tika failed to extract text from " + f.getAbsoluteFile(), e);
        }

        String description = getCurrentOftenDescriptionWord() + " " + getCurrentSeldomDescriptionWord();

        // Random choice of tag
        // FIX: was rand.nextInt(nbOfTags - 1), which threw when only one tag
        // existed and could never select the last tag.
        int randomTagIndex = rand.nextInt(nbOfTags);
        TagBO tag = tags.get(randomTagIndex);

        // Random creation date
        String creationDate = getRandomJcrDate(timestampDifference);
        newFile = new FileBO(fileName, mixin, mimeType, currentNodePath + "/" + fileName, creator, owner,
                editor, reader, extractedContent, description, tag.getTagName(), wiseInstanceName,
                creationDate);
        files.add(newFile);
    }
    return files;
}

From source file:com.github.FraggedNoob.GitLabTransfer.GitlabRelatedData.java

/**
 * Prints every project issue in this instance together with how many notes
 * each one has (or a "(no notes)" marker when none exist).
 */
public void listAllIssuesNotes() {
    if (issues.isEmpty()) {
        return;
    }
    // issues and notes
    System.out.println("Project Issues:");
    for (GitlabIssue issue : issues) {
        System.out.printf("IID=%d, Title=%s\n", issue.getIid(), issue.getTitle());
        SortedSet<GitlabNote> notes = issueNotes.get(issue.getIid());
        if (notes != null && !notes.isEmpty()) {
            System.out.printf("\t [%d notes]\n", notes.size());
        } else {
            System.out.printf("\t (no notes)\n");
        }
    }
}

From source file:piecework.engine.activiti.ActivitiEngineProxy.java

/**
 * Looks up historic process executions matching the given criteria, filtering
 * the results down to the engine-specific process definition keys requested.
 *
 * @param criteria the search criteria; ignored unless it targets this engine
 * @return the matching executions, or null when this engine is not targeted
 * @throws ProcessEngineException on engine lookup failures
 */
@Override
public ProcessExecutionResults findExecutions(SearchCriteria criteria) throws ProcessEngineException {
    if (!criteria.getEngines().contains(getKey()))
        return null;

    HistoricProcessInstanceQuery query = instanceQuery(criteria);

    ProcessExecutionResults.Builder resultsBuilder = new ProcessExecutionResults.Builder();

    // Can't use paging since we're going to filter after the fact
    List<HistoricProcessInstance> instances = query.list();
    int total = instances.size();

    resultsBuilder.firstResult(0);
    resultsBuilder.maxResults(total);
    resultsBuilder.total(total);

    // Only need to worry about filtering if there are more than 1 key, since with 1 key
    // it's part of the search that Activiti performs --- see the instanceQuery() method
    SortedSet<String> definitionKeys = new TreeSet<String>(criteria.getEngineProcessDefinitionKeys());
    Set<String> processDefinitionIds = proxyHelper
            .getProcessDefinitionIds(definitionKeys.toArray(new String[definitionKeys.size()]));

    List<ProcessExecution> executions;
    if (instances == null || instances.isEmpty()) {
        executions = Collections.emptyList();
    } else {
        executions = new ArrayList<ProcessExecution>(instances.size());
        for (HistoricProcessInstance instance : instances) {
            // Drop instances whose definition was not requested.
            if (processDefinitionIds.contains(instance.getProcessDefinitionId())) {
                ProcessExecution.Builder executionBuilder = new ProcessExecution.Builder()
                        .executionId(instance.getId()).businessKey(instance.getBusinessKey())
                        .initiatorId(instance.getStartUserId()).deleteReason(instance.getDeleteReason());

                if (criteria.isIncludeVariables()) {
                    Map<String, Object> variables = processEngine.getRuntimeService()
                            .getVariables(instance.getId());
                    executionBuilder.data(variables);
                }

                executions.add(executionBuilder.build());
            }
        }
    }

    resultsBuilder.executions(executions);

    return resultsBuilder.build();
}

From source file:com.twitter.hraven.datasource.JobHistoryRawService.java

/**
 * Given a min and max jobId, get a {@link Scan} to go through all the records
 * loaded in the {@link Constants#HISTORY_RAW_TABLE}, get all the rowkeys and
 * create a list of scans with batchSize number of rows in the rawTable.
 * <p>
 * Note that this can be a somewhat slow operation as the
 * {@link Constants#HISTORY_RAW_TABLE} will have to be scanned.
 * 
 * @param cluster
 *          on which the Hadoop jobs ran.
 * @param minJobId
 *          used to start the scan. If null then there is no min limit on
 *          JobId.
 * @param maxJobId
 *          used to end the scan (inclusive). If null then there is no max
 *          limit on jobId.
 * @param reprocess
 *          Reprocess those records that may have been processed already.
 *          Otherwise successfully processed jobs are skipped.
 * @param batchSize
 *          maximum number of jobIds covered by each of the returned scans.
 * @return a list of scans covering jobIds between the specified min and max,
 *         each spanning at most batchSize jobs. Retrieves only one version of
 *         each column.
 * @throws IOException
 * @throws RowKeyParseException
 *           when rows returned from the Raw table do not conform to the
 *           expected row key.
 */
public List<Scan> getHistoryRawTableScans(String cluster, String minJobId, String maxJobId, boolean reprocess,
        int batchSize) throws IOException, RowKeyParseException {

    List<Scan> scans = new LinkedList<Scan>();

    // Get all the values in the scan so that we can evenly chop them into
    // batch size chunks.
    // The problem is that processRecords min and max can have vastly
    // overlapping ranges, and in addition, they may have a minJobId of a long
    // running Hadoop job that is processed much later. Many jobIds that are
    // of shorter jobs that have already been processed will in between the
    // min and max, but since the scan returns only the records that are not
    // already processed, the returned list may have large gaps.
    Scan scan = getHistoryRawTableScan(cluster, minJobId, maxJobId, reprocess, false);

    // TreeSet keeps the jobIds in their natural order so ranges are contiguous.
    SortedSet<JobId> orderedJobIds = new TreeSet<JobId>();

    ResultScanner scanner = null;
    try {
        LOG.info("Scanning " + Constants.HISTORY_RAW_TABLE + " table from " + minJobId + " to " + maxJobId);
        scanner = rawTable.getScanner(scan);
        for (Result result : scanner) {
            JobId qualifiedJobId = getQualifiedJobIdFromResult(result);
            orderedJobIds.add(qualifiedJobId);
        }
    } finally {
        // Always release the scanner, even when row-key parsing fails mid-scan.
        if (scanner != null) {
            scanner.close();
        }
    }

    // Now chop the set into chunks.
    List<Range<JobId>> ranges = BatchUtil.getRanges(orderedJobIds, batchSize);
    LOG.info("Dividing " + orderedJobIds.size() + " jobs in " + ranges.size() + " ranges.");

    // Each range becomes one bounded scan (max inclusive via the 'true' flag).
    for (Range<JobId> range : ranges) {
        Scan rawScan = getHistoryRawTableScan(cluster, range.getMin().getJobIdString(),
                range.getMax().getJobIdString(), reprocess, true);
        scans.add(rawScan);
    }

    return scans;
}

From source file:net.nicholaswilliams.java.teamcity.plugin.buildNumber.TestPluginConfigurationServiceDefault.java

@Test
public void testGetAllSharedBuildNumbersSortedById02() {
    final ConfigurationEntity configuration = this.getConfiguration();

    SharedBuildNumberEntity middleId = new SharedBuildNumberEntity();
    middleId.setId(5);
    configuration.addOrUpdateBuildNumber(middleId);

    SharedBuildNumberEntity highestId = new SharedBuildNumberEntity();
    highestId.setId(22);
    configuration.addOrUpdateBuildNumber(highestId);

    SharedBuildNumberEntity lowestId = new SharedBuildNumberEntity();
    lowestId.setId(1);
    configuration.addOrUpdateBuildNumber(lowestId);

    replay(this.service);

    SortedSet<SharedBuildNumber> set = this.service.getAllSharedBuildNumbersSortedById(true);

    assertNotNull("The set should not be null.", set);
    assertEquals("The set is the wrong size.", 3, set.size());

    // Descending by id: 22, 5, 1.
    SharedBuildNumberEntity[] expectedOrder = { highestId, middleId, lowestId };

    int position = 0;
    for (SharedBuildNumber actual : set) {
        assertEquals("The build number is not correct.", expectedOrder[position].getId(), actual.getId());
        position++;
    }

    verify(this.service);
}

From source file:net.nicholaswilliams.java.teamcity.plugin.buildNumber.TestPluginConfigurationServiceDefault.java

@Test
public void testGetAllSharedBuildNumbersSortedById01() {
    final ConfigurationEntity configuration = this.getConfiguration();

    SharedBuildNumberEntity middleId = new SharedBuildNumberEntity();
    middleId.setId(5);
    configuration.addOrUpdateBuildNumber(middleId);

    SharedBuildNumberEntity highestId = new SharedBuildNumberEntity();
    highestId.setId(22);
    configuration.addOrUpdateBuildNumber(highestId);

    SharedBuildNumberEntity lowestId = new SharedBuildNumberEntity();
    lowestId.setId(1);
    configuration.addOrUpdateBuildNumber(lowestId);

    replay(this.service);

    SortedSet<SharedBuildNumber> set = this.service.getAllSharedBuildNumbersSortedById(false);

    assertNotNull("The set should not be null.", set);
    assertEquals("The set is the wrong size.", 3, set.size());

    // Ascending by id: 1, 5, 22.
    SharedBuildNumberEntity[] expectedOrder = { lowestId, middleId, highestId };

    int position = 0;
    for (SharedBuildNumber actual : set) {
        assertEquals("The build number is not correct.", expectedOrder[position].getId(), actual.getId());
        position++;
    }

    verify(this.service);
}

From source file:net.nicholaswilliams.java.teamcity.plugin.buildNumber.TestPluginConfigurationServiceDefault.java

@Test
public void testGetAllSharedBuildNumbersSortedByName02() {
    final ConfigurationEntity configuration = this.getConfiguration();

    SharedBuildNumberEntity killerName = new SharedBuildNumberEntity();
    killerName.setId(1);
    killerName.setName("This is a killer name!");
    configuration.addOrUpdateBuildNumber(killerName);

    SharedBuildNumberEntity helloName = new SharedBuildNumberEntity();
    helloName.setId(2);
    helloName.setName("Hello, World.");
    configuration.addOrUpdateBuildNumber(helloName);

    SharedBuildNumberEntity coolName = new SharedBuildNumberEntity();
    coolName.setId(3);
    coolName.setName("This is a cool name.");
    configuration.addOrUpdateBuildNumber(coolName);

    replay(this.service);

    SortedSet<SharedBuildNumber> set = this.service.getAllSharedBuildNumbersSortedByName(true);

    assertNotNull("The set should not be null.", set);
    assertEquals("The set is the wrong size.", 3, set.size());

    // Descending by name: "This is a killer...", "This is a cool...", "Hello...".
    SharedBuildNumberEntity[] expectedOrder = { killerName, coolName, helloName };

    int position = 0;
    for (SharedBuildNumber actual : set) {
        assertEquals("The build number is not correct.", expectedOrder[position].getId(), actual.getId());
        position++;
    }

    verify(this.service);
}

From source file:net.nicholaswilliams.java.teamcity.plugin.buildNumber.TestPluginConfigurationServiceDefault.java

@Test
public void testGetAllSharedBuildNumbersSortedByName01() {
    final ConfigurationEntity configuration = this.getConfiguration();

    SharedBuildNumberEntity killerName = new SharedBuildNumberEntity();
    killerName.setId(1);
    killerName.setName("This is a killer name!");
    configuration.addOrUpdateBuildNumber(killerName);

    SharedBuildNumberEntity helloName = new SharedBuildNumberEntity();
    helloName.setId(2);
    helloName.setName("Hello, World.");
    configuration.addOrUpdateBuildNumber(helloName);

    SharedBuildNumberEntity coolName = new SharedBuildNumberEntity();
    coolName.setId(3);
    coolName.setName("This is a cool name.");
    configuration.addOrUpdateBuildNumber(coolName);

    replay(this.service);

    SortedSet<SharedBuildNumber> set = this.service.getAllSharedBuildNumbersSortedByName(false);

    assertNotNull("The set should not be null.", set);
    assertEquals("The set is the wrong size.", 3, set.size());

    // Ascending by name: "Hello...", "This is a cool...", "This is a killer...".
    SharedBuildNumberEntity[] expectedOrder = { helloName, coolName, killerName };

    int position = 0;
    for (SharedBuildNumber actual : set) {
        assertEquals("The build number is not correct.", expectedOrder[position].getId(), actual.getId());
        position++;
    }

    verify(this.service);
}