Example usage for java.util SortedSet contains

List of usage examples for java.util SortedSet contains

Introduction

On this page you can find example usages of java.util.SortedSet.contains.

Prototype

boolean contains(Object o);

Document

Returns true if this set contains the specified element.
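
Before the real-world usages below, here is a minimal, self-contained sketch of the method in action (the class name and element values are made up for illustration):

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetContainsExample {
    public static void main(String[] args) {
        // TreeSet is the standard SortedSet implementation; elements are kept in natural order.
        SortedSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");

        // contains() locates the element using the set's ordering (compareTo for String).
        System.out.println(names.contains("alice")); // true
        System.out.println(names.contains("carol")); // false
    }
}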

Usage

From source file:net.pms.dlna.protocolinfo.DeviceProtocolInfo.java

/**
 * Returns {@code true} if the {@link Set} for the given
 * {@link DeviceProtocolInfoSource} contains the specified element.
 *
 * @param type the {@link DeviceProtocolInfoSource} type to check.
 * @param protocolInfo the element whose presence is to be tested.
 * @return {@code true} if the {@link Set} for the given
 *         {@link DeviceProtocolInfoSource} contains the specified element,
 *         {@code false} otherwise.
 */
public boolean contains(DeviceProtocolInfoSource<?> type, ProtocolInfo protocolInfo) {
    setsLock.readLock().lock();
    try {
        SortedSet<ProtocolInfo> set = protocolInfoSets.get(type);
        return set != null && set.contains(protocolInfo);
    } finally {
        setsLock.readLock().unlock();
    }
}

From source file:net.sourceforge.subsonic.service.SonosService.java

@Override
public RemoveFromContainerResult removeFromContainer(String id, String indices, String updateId) {
    if (id.startsWith(ID_PLAYLIST_PREFIX)) {
        int playlistId = Integer.parseInt(id.replace(ID_PLAYLIST_PREFIX, ""));
        Playlist playlist = playlistService.getPlaylist(playlistId);
        if (playlist != null && playlist.getUsername().equals(getUsername())) {
            SortedSet<Integer> indicesToRemove = parsePlaylistIndices(indices);
            List<MediaFile> songs = playlistService.getFilesInPlaylist(playlistId);
            List<MediaFile> updatedSongs = new ArrayList<MediaFile>();
            for (int i = 0; i < songs.size(); i++) {
                if (!indicesToRemove.contains(i)) {
                    updatedSongs.add(songs.get(i));
                }
            }
            playlistService.setFilesInPlaylist(playlistId, updatedSongs);
        }
    }
    return new RemoveFromContainerResult();
}
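
The example above depends on a parsePlaylistIndices helper that is not shown on this page. A plausible sketch, assuming the indices arrive as a comma-separated string such as "0,2,5" (both the format and the implementation are assumptions, not taken from the Subsonic source), would build the SortedSet<Integer> that contains(i) is called on:

// Hypothetical sketch of the helper used above; assumes java.util.SortedSet and java.util.TreeSet are imported.
// Parses a comma-separated index string such as "0,2,5" into a sorted set of integers.
private SortedSet<Integer> parsePlaylistIndices(String indices) {
    SortedSet<Integer> result = new TreeSet<Integer>();
    if (indices != null && !indices.trim().isEmpty()) {
        for (String token : indices.split(",")) {
            result.add(Integer.parseInt(token.trim()));
        }
    }
    return result;
}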

From source file:org.apache.tephra.hbase.txprune.DataJanitorState.java

/**
 * Delete prune upper bounds for regions that are not in the given exclude set and whose
 * prune upper bound is less than the given value.
 * After the invalid list is pruned up to deletionPruneUpperBound, entries are no longer needed for regions
 * whose prune upper bound is below deletionPruneUpperBound. However, the deletion is limited to regions that
 * no longer exist (due to deletion, etc.), to avoid update/delete race conditions.
 *
 * @param deletionPruneUpperBound prune upper bound below which regions will be deleted
 * @param excludeRegions set of regions that should not be deleted
 * @throws IOException when not able to delete data in HBase
 */
public void deletePruneUpperBounds(long deletionPruneUpperBound, SortedSet<byte[]> excludeRegions)
        throws IOException {
    try (Table stateTable = stateTableSupplier.get()) {
        byte[] startRow = makeRegionKey(EMPTY_BYTE_ARRAY);
        Scan scan = new Scan(startRow, REGION_KEY_PREFIX_STOP);
        scan.addColumn(FAMILY, PRUNE_UPPER_BOUND_COL);

        try (ResultScanner scanner = stateTable.getScanner(scan)) {
            Result next;
            while ((next = scanner.next()) != null) {
                byte[] region = getRegionFromKey(next.getRow());
                if (!excludeRegions.contains(region)) {
                    byte[] timeBytes = next.getValue(FAMILY, PRUNE_UPPER_BOUND_COL);
                    if (timeBytes != null) {
                        long pruneUpperBoundRegion = Bytes.toLong(timeBytes);
                        if (pruneUpperBoundRegion < deletionPruneUpperBound) {
                            stateTable.delete(new Delete(next.getRow()));
                        }
                    }
                }
            }
        }
    }
}
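
Note that contains() on a SortedSet<byte[]> only matches by content when the set was built with a byte-array comparator, because arrays do not override equals(). A caller of this method would therefore be expected to construct excludeRegions roughly like this (a sketch, not the actual Tephra call site):

import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludeRegionsExample {
    public static void main(String[] args) {
        // Comparator-based TreeSet so that contains() compares byte arrays by content.
        SortedSet<byte[]> excludeRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
        excludeRegions.add(Bytes.toBytes("region-a"));

        System.out.println(excludeRegions.contains(Bytes.toBytes("region-a"))); // true
        System.out.println(excludeRegions.contains(Bytes.toBytes("region-b"))); // false
    }
}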

From source file:com.okta.swagger.codegen.AbstractOktaJavaClientCodegen.java

private void handleOktaLinkedOperations(Swagger swagger) {
    // we want to move any operations defined by the 'x-okta-operations' or 'x-okta-crud' vendor extension to the model
    Map<String, Model> modelMap = swagger.getDefinitions().entrySet().stream()
            .filter(e -> e.getValue().getVendorExtensions().containsKey("x-okta-operations")
                    || e.getValue().getVendorExtensions().containsKey("x-okta-crud"))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

    modelMap.forEach((k, model) -> {
        List<ObjectNode> linkNodes = new ArrayList<>();

        addAllIfNotNull(linkNodes, (List<ObjectNode>) model.getVendorExtensions().get("x-okta-operations"));
        addAllIfNotNull(linkNodes, (List<ObjectNode>) model.getVendorExtensions().get("x-okta-crud"));

        Map<String, CodegenOperation> operationMap = new HashMap<>();

        linkNodes.forEach(n -> {
            String operationId = n.get("operationId").textValue();

            // find the swagger path operation
            swagger.getPaths().forEach((pathName, path) -> {
                Optional<Map.Entry<HttpMethod, Operation>> operationEntry = path.getOperationMap().entrySet()
                        .stream().filter(e -> e.getValue().getOperationId().equals(operationId)).findFirst();

                if (operationEntry.isPresent()) {

                    Operation operation = operationEntry.get().getValue();

                    CodegenOperation cgOperation = fromOperation(pathName,
                            operationEntry.get().getKey().name().toLowerCase(), operation,
                            swagger.getDefinitions(), swagger);

                    boolean canLinkMethod = true;

                    JsonNode aliasNode = n.get("alias");
                    if (aliasNode != null) {
                        String alias = aliasNode.textValue();
                        cgOperation.vendorExtensions.put("alias", alias);

                        if ("update".equals(alias)) {
                            model.getVendorExtensions().put("saveable", true);
                        } else if ("delete".equals(alias)) {
                            model.getVendorExtensions().put("deletable", true);
                            cgOperation.vendorExtensions.put("selfDelete", true);
                        } else if ("read".equals(alias) || "create".equals(alias)) {
                            canLinkMethod = false;
                        }
                    }

                    // we do NOT link read or create methods, those need to be on the parent object
                    if (canLinkMethod) {

                        // for any params that match the model's properties, use the model value directly;
                        // for example, if the path contained {id} we would call getId() instead

                        Map<String, String> argMap = createArgMap(n);

                        List<CodegenParameter> cgOtherPathParamList = new ArrayList<>();
                        List<CodegenParameter> cgParamAllList = new ArrayList<>();
                        List<CodegenParameter> cgParamModelList = new ArrayList<>();

                        cgOperation.pathParams.forEach(param -> {

                            if (argMap.containsKey(param.paramName)) {

                                String paramName = argMap.get(param.paramName);
                                cgParamModelList.add(param);

                                if (model.getProperties() != null) {
                                    CodegenProperty cgProperty = fromProperty(paramName,
                                            model.getProperties().get(paramName));
                                    param.vendorExtensions.put("fromModel", cgProperty);
                                } else {
                                    System.err.println("Model '" + model.getTitle() + "' has no properties");
                                }

                            } else {
                                cgOtherPathParamList.add(param);
                            }
                        });

                        // remove the body param if the body is the object itself
                        for (Iterator<CodegenParameter> iter = cgOperation.bodyParams.iterator(); iter
                                .hasNext();) {
                            CodegenParameter bodyParam = iter.next();
                            if (argMap.containsKey(bodyParam.paramName)) {
                                cgOperation.vendorExtensions.put("bodyIsSelf", true);
                                iter.remove();
                            }
                        }

                        // do not add the parent path params to the list (they will be parsed from the href)
                        SortedSet<String> pathParents = parentPathParams(n);
                        cgOtherPathParamList.forEach(param -> {
                            if (!pathParents.contains(param.paramName)) {
                                cgParamAllList.add(param);
                            }
                        });

                        if (!pathParents.isEmpty()) {
                            cgOperation.vendorExtensions.put("hasPathParents", true);
                            cgOperation.vendorExtensions.put("pathParents", pathParents);
                        }

                        cgParamAllList.addAll(cgOperation.queryParams);
                        cgParamAllList.addAll(cgOperation.bodyParams);

                        // set all params to have more
                        cgParamAllList.forEach(param -> param.hasMore = true);

                        // then grab the last one and mark it as the last
                        if (!cgParamAllList.isEmpty()) {
                            CodegenParameter param = cgParamAllList.get(cgParamAllList.size() - 1);
                            param.hasMore = false;
                        }

                        cgOperation.vendorExtensions.put("allParams", cgParamAllList);
                        cgOperation.vendorExtensions.put("fromModelPathParams", cgParamModelList);

                        addOptionalExtension(cgOperation, cgParamAllList);

                        operationMap.put(cgOperation.operationId, cgOperation);

                        // mark the operation as moved so we do NOT add it to the client
                        operation.getVendorExtensions().put("moved", true);

                    }
                }
            });
        });

        model.getVendorExtensions().put("operations", operationMap.values());
    });
}

From source file:org.jasig.schedassist.model.AvailableBlockBuilderTest.java

/**
 * Create 2 adjacent blocks with the visitor limit set to 10.
 * Pass them into combine and assert that they come out as 1 combined block.
 *
 * @throws Exception
 */
@Test
public void testCombineVisitorLimit10() throws Exception {
    AvailableBlock block1 = AvailableBlockBuilder.createBlock("20091007-1200", "20091007-1230", 10);
    assertEquals(30, block1.getDurationInMinutes());
    AvailableBlock block2 = AvailableBlockBuilder.createBlock("20091007-1230", "20091007-1300", 10);
    assertEquals(30, block2.getDurationInMinutes());
    SortedSet<AvailableBlock> smallBlocks = new TreeSet<AvailableBlock>();
    smallBlocks.add(block1);
    smallBlocks.add(block2);
    SortedSet<AvailableBlock> resultCombined = AvailableBlockBuilder.combine(smallBlocks);
    assertEquals(1, resultCombined.size());
    AvailableBlock expectedCombined = AvailableBlockBuilder.createBlock("20091007-1200", "20091007-1300", 10);
    assertTrue(resultCombined.contains(expectedCombined));
    assertEquals(60, expectedCombined.getDurationInMinutes());
}

From source file:org.voltdb.sysprocs.UpdateApplicationCatalog.java

/**
 * Use EE stats to get the row counts for all tables in this partition.
 * Check the provided list of tables that need to be empty against actual
 * row counts. If any of them aren't empty, stop the catalog update and
 * return the pre-provided error message that corresponds to the non-empty
 * tables.
 *
 * @param tablesThatMustBeEmpty List of table names that must be empty.
 * @param reasonsForEmptyTables Error messages to return if that table isn't
 * empty.
 * @param context
 */
protected void checkForNonEmptyTables(String[] tablesThatMustBeEmpty, String[] reasonsForEmptyTables,
        SystemProcedureExecutionContext context) {
    assert (tablesThatMustBeEmpty != null);
    // no work to do if no tables need to be empty
    if (tablesThatMustBeEmpty.length == 0) {
        return;
    }
    assert (reasonsForEmptyTables != null);
    assert (reasonsForEmptyTables.length == tablesThatMustBeEmpty.length);

    // fetch the ids of the tables that must be empty from the
    //  current catalog (not the new one).
    CatalogMap<Table> tables = context.getDatabase().getTables();
    int[] tableIds = new int[tablesThatMustBeEmpty.length];
    int i = 0;
    for (String tableName : tablesThatMustBeEmpty) {
        Table table = tables.get(tableName);
        if (table == null) {
            String msg = String.format("@UpdateApplicationCatalog was checking to see if table %s was empty, "
                    + "presumably as part of a schema change, and it failed to find the table "
                    + "in the current catalog context.", tableName);
            throw new SpecifiedException(ClientResponse.UNEXPECTED_FAILURE, msg);
        }
        tableIds[i++] = table.getRelativeIndex();
    }

    // get the table stats for these tables from the EE
    final VoltTable[] s1 = context.getSiteProcedureConnection().getStats(StatsSelector.TABLE, tableIds, false,
            getTransactionTime().getTime());
    if ((s1 == null) || (s1.length == 0)) {
        String tableNames = StringUtils.join(tablesThatMustBeEmpty, ", ");
        String msg = String.format("@UpdateApplicationCatalog was checking to see if tables (%s) were empty ,"
                + "presumably as part of a schema change, but failed to get the row counts "
                + "from the native storage engine.", tableNames);
        throw new SpecifiedException(ClientResponse.UNEXPECTED_FAILURE, msg);
    }
    VoltTable stats = s1[0];
    SortedSet<String> nonEmptyTables = new TreeSet<String>();

    // find all non-empty tables
    while (stats.advanceRow()) {
        long tupleCount = stats.getLong("TUPLE_COUNT");
        String tableName = stats.getString("TABLE_NAME");
        if (tupleCount > 0 && !"StreamedTable".equals(stats.getString("TABLE_TYPE"))) {
            nonEmptyTables.add(tableName);
        }
    }

    // return an error containing the names of all non-empty tables
    // via the propagated reasons why each needs to be empty
    if (!nonEmptyTables.isEmpty()) {
        String msg = "Unable to make requested schema change:\n";
        for (i = 0; i < tablesThatMustBeEmpty.length; ++i) {
            if (nonEmptyTables.contains(tablesThatMustBeEmpty[i])) {
                msg += reasonsForEmptyTables[i] + "\n";
            }
        }
        throw new SpecifiedException(ClientResponse.GRACEFUL_FAILURE, msg);
    }
}

From source file:org.apache.tephra.hbase.txprune.DataJanitorState.java

/**
 * Gets a list of {@link RegionPruneInfo} for the given regions. Returns information for all regions
 * if the given set is null.
 *
 * @param regions a set of regions
 * @return list of {@link RegionPruneInfo}s.
 * @throws IOException when not able to read the data from HBase
 */
public List<RegionPruneInfo> getPruneInfoForRegions(@Nullable SortedSet<byte[]> regions) throws IOException {
    List<RegionPruneInfo> regionPruneInfos = new ArrayList<>();
    try (Table stateTable = stateTableSupplier.get()) {
        byte[] startRow = makeRegionKey(EMPTY_BYTE_ARRAY);
        Scan scan = new Scan(startRow, REGION_KEY_PREFIX_STOP);
        scan.addColumn(FAMILY, PRUNE_UPPER_BOUND_COL);

        try (ResultScanner scanner = stateTable.getScanner(scan)) {
            Result next;
            while ((next = scanner.next()) != null) {
                byte[] region = getRegionFromKey(next.getRow());
                if (regions == null || regions.contains(region)) {
                    Cell cell = next.getColumnLatestCell(FAMILY, PRUNE_UPPER_BOUND_COL);
                    if (cell != null) {
                        byte[] pruneUpperBoundBytes = CellUtil.cloneValue(cell);
                        long timestamp = cell.getTimestamp();
                        regionPruneInfos.add(new RegionPruneInfo(region, Bytes.toStringBinary(region),
                                Bytes.toLong(pruneUpperBoundBytes), timestamp));
                    }
                }
            }
        }
    }
    return Collections.unmodifiableList(regionPruneInfos);
}

From source file:uk.ac.ebi.fg.jobs.OntologySimilarityJob.java

public void doExecute(JobExecutionContext jobExecutionContext)
        throws JobExecutionException, InterruptedException {
    JobDataMap dataMap = jobExecutionContext.getJobDetail().getJobDataMap();
    Map<ExperimentId, SortedSet<EfoTerm>> smallMap = (Map<ExperimentId, SortedSet<EfoTerm>>) dataMap
            .get("smallMap");
    OntologyDistanceCalculator distanceCalculator = (OntologyDistanceCalculator) dataMap
            .get("distanceCalculator");
    Map<String, SortedSet<ExperimentId>> uriToExpMap = (ConcurrentHashMap<String, SortedSet<ExperimentId>>) dataMap
            .get("uriToExpMap");
    Map<ExperimentId, SortedSet<EfoTerm>> expToURIMap = (ConcurrentHashMap<ExperimentId, SortedSet<EfoTerm>>) dataMap
            .get("expToURIMap");
    Map<ExperimentId, SortedSet<ExperimentId>> ontologyResults = (ConcurrentHashMap<ExperimentId, SortedSet<ExperimentId>>) dataMap
            .get("ontologyResults");
    lowPriorityURIs = (SortedSet<String>) dataMap.get("lowPriorityOntologyURIs");
    int counter = (Integer) dataMap.get("counter");
    Configuration properties = (Configuration) dataMap.get("properties");

    final int maxOWLSimilarityCount = properties.getInt("max_displayed_OWL_similarities");
    final int smallExpAssayCountLimit = properties.getInt("small_experiment_assay_count_limit");
    final float minCalculatedOntologyDistance = properties.getFloat("minimal_calculated_ontology_distance");

    logger.info("Started " + (counter - smallMap.size()) + " - " + counter + " ontology similarity jobs");

    for (Map.Entry<ExperimentId, SortedSet<EfoTerm>> entry : smallMap.entrySet()) {
        ExperimentId experiment = entry.getKey();
        SortedSet<ExperimentId> resultExpSimilaritySet = new TreeSet<ExperimentId>();

        for (EfoTerm efoTerm : entry.getValue()) {
            Set<OntologySimilarityResult> similars = distanceCalculator.getSimilarNodes(efoTerm.getUri());

            if (null != similars) {
                for (OntologySimilarityResult ontologySimilarityResult : similars) {
                    int distance = ontologySimilarityResult.getDistance();
                    SortedSet<ExperimentId> similarExperiments = uriToExpMap
                            .get(ontologySimilarityResult.getURI());

                    if (similarExperiments != null) {
                        for (ExperimentId exp : similarExperiments) {
                            if (experiment.getSpecies().equals(exp.getSpecies()) && !experiment.equals(exp)) {
                                if (resultExpSimilaritySet.contains(exp)) {
                                    ExperimentId expClone = resultExpSimilaritySet.tailSet(exp).first().clone();
                                    resultExpSimilaritySet.remove(exp);
                                    resultExpSimilaritySet.add(
                                            setDistance(expClone, ontologySimilarityResult.getURI(), distance));
                                } else {
                                    ExperimentId expClone = exp.clone();
                                    resultExpSimilaritySet.add(
                                            setDistance(expClone, ontologySimilarityResult.getURI(), distance));
                                }
                            }
                        }
                    }
                }
            }
        }

        // store information for maximal score calculation
        ExperimentId experimentClone = experiment.clone();
        for (EfoTerm efoTerm : expToURIMap.get(experimentClone)) {
            if (lowPriorityURIs.contains(efoTerm.getUri()))
                experimentClone.setLowPriorityMatchCount(experimentClone.getLowPriorityMatchCount() + 1);
            else
                experimentClone.setDist0Count(experimentClone.getDist0Count() + 1);

            experimentClone.setNumbOfMatches(experimentClone.getNumbOfMatches() + 1);
        }

        ontologyResults.put(experimentClone, cleanResults(experimentClone, resultExpSimilaritySet,
                smallExpAssayCountLimit, maxOWLSimilarityCount, minCalculatedOntologyDistance, expToURIMap));

        Thread.sleep(1); // brief pause between experiments; wait(1) would require holding the monitor
    }

    logger.info("Finished " + (counter - smallMap.size()) + " - " + counter + " ontology similarity jobs");

    smallMap.clear();
}

From source file:org.apache.tephra.hbase.txprune.DataJanitorState.java

/**
 * Return regions that were recorded as empty after the given time.
 *
 * @param time time in milliseconds
 * @param includeRegions If not null, the returned set will be an intersection of the includeRegions set
 *                       and the empty regions after the given time
 */
public SortedSet<byte[]> getEmptyRegionsAfterTime(long time, @Nullable SortedSet<byte[]> includeRegions)
        throws IOException {
    SortedSet<byte[]> emptyRegions = new TreeSet<>(Bytes.BYTES_COMPARATOR);
    try (Table stateTable = stateTableSupplier.get()) {
        Scan scan = new Scan(makeEmptyRegionTimeKey(Bytes.toBytes(time + 1), EMPTY_BYTE_ARRAY),
                EMPTY_REGION_TIME_KEY_PREFIX_STOP);
        scan.addColumn(FAMILY, EMPTY_REGION_TIME_COL);

        try (ResultScanner scanner = stateTable.getScanner(scan)) {
            Result next;
            while ((next = scanner.next()) != null) {
                byte[] emptyRegion = getEmptyRegionFromKey(next.getRow());
                if (includeRegions == null || includeRegions.contains(emptyRegion)) {
                    emptyRegions.add(emptyRegion);
                }
            }
        }
    }
    return Collections.unmodifiableSortedSet(emptyRegions);
}

From source file:com.revetkn.ios.analyzer.ArtworkAnalyzer.java

protected void detectRetinaAndNonretinaImages(ApplicationArtwork applicationArtwork) {
    SortedSet<String> allImageFilenames = extractFilenames(applicationArtwork.getAllImageFiles());
    SortedSet<File> retinaImageFiles = new TreeSet<File>();
    SortedSet<File> nonretinaImageFiles = new TreeSet<File>();
    SortedSet<File> nonretinaImageFilesMissingRetinaImages = new TreeSet<File>();
    SortedSet<File> retinaImageFilesMissingNonretinaImages = new TreeSet<File>();

    for (String imageFilename : allImageFilenames) {
        int lastIndexOf2x = imageFilename.lastIndexOf("@2x");

        // Nonretina; search for a retina version
        if (lastIndexOf2x == -1) {
            nonretinaImageFiles.add(new File(imageFilename));

            if (!allImageFilenames.contains(retinaImageFilename(imageFilename)))
                nonretinaImageFilesMissingRetinaImages.add(new File(imageFilename));
        } else {
            retinaImageFiles.add(new File(imageFilename));

            String nonretinaImageFilename = imageFilename.replace("@2x", "");

            if (!allImageFilenames.contains(nonretinaImageFilename))
                retinaImageFilesMissingNonretinaImages.add(new File(imageFilename));
        }
    }

    applicationArtwork.setRetinaImageFiles(retinaImageFiles);
    applicationArtwork.setNonretinaImageFiles(nonretinaImageFiles);
    applicationArtwork.setNonretinaImageFilesMissingRetinaImages(nonretinaImageFilesMissingRetinaImages);
    applicationArtwork.setRetinaImageFilesMissingNonretinaImages(retinaImageFilesMissingNonretinaImages);
}
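
The loop above relies on a retinaImageFilename helper that is not shown on this page. A plausible sketch (assumed, not taken from the analyzer source) inserts the "@2x" suffix before the file extension so the lookup in allImageFilenames can succeed:

// Hypothetical sketch of the helper used above: "icon.png" -> "icon@2x.png".
// Filenames without an extension simply get the suffix appended.
protected String retinaImageFilename(String imageFilename) {
    int dotIndex = imageFilename.lastIndexOf('.');
    if (dotIndex == -1) {
        return imageFilename + "@2x";
    }
    return imageFilename.substring(0, dotIndex) + "@2x" + imageFilename.substring(dotIndex);
}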