Example usage for java.util LinkedHashSet contains

List of usage examples for java.util LinkedHashSet contains

Introduction

On this page you can find example usage for java.util LinkedHashSet contains.

Prototype

boolean contains(Object o);

Source Link

Document

Returns true if this set contains the specified element.

Usage

From source file:com.assemblade.opendj.Session.java

/**
 * Performs an internal LDAP search that is expected to return exactly one entry.
 *
 * @param dn         base DN to search from
 * @param scope      search scope
 * @param filter     LDAP filter string
 * @param attributes attributes to request; requesting "aclRights" or the "*" wildcard
 *                   also attaches a GetEffectiveRightsRequestControl to the search
 * @return the single matching entry
 * @throws StorageException if the search fails or does not yield exactly one entry
 */
private Entry internalGet(String dn, SearchScope scope, String filter, LinkedHashSet<String> attributes)
        throws StorageException {
    final List<Entry> entries = new ArrayList<Entry>();
    try {
        List<Control> controls = new ArrayList<Control>();
        // Effective-rights information is only returned when explicitly requested
        // (directly, or implicitly via the all-attributes wildcard).
        if (attributes.contains("aclRights") || attributes.contains("*")) {
            controls.add(new GetEffectiveRightsRequestControl(false, null, new ArrayList<String>()));
        }
        InternalSearchOperation searchResult = connection.processSearch(dn, scope,
                DereferencePolicy.NEVER_DEREF_ALIASES, 0, 0, false, filter, attributes, controls,
                new InternalSearchListener() {
                    // Collect every returned entry; the single-entry contract is checked afterwards.
                    public void handleInternalSearchEntry(InternalSearchOperation operation,
                            SearchResultEntry searchEntry) throws DirectoryException {
                        entries.add(searchEntry);
                    }

                    // Search references are deliberately ignored.
                    public void handleInternalSearchReference(InternalSearchOperation operation,
                            SearchResultReference reference) throws DirectoryException {
                    }
                });
        if (searchResult.getResultCode() == ResultCode.SUCCESS) {
            if (entries.size() == 1) {
                return entries.get(0);
            } else {
                // Zero or multiple hits both violate the single-entry contract.
                log.debug("Got " + entries.size() + " entries for what should have been a single entry [" + dn
                        + "]");
                throw new StorageException(AssembladeErrorCode.ASB_0006);
            }
        } else {
            log.error("Failed to get entry [" + dn + "] because: " + searchResult.getErrorMessage().toString());
            // NOTE(review): dumping the whole directory tree on every failed lookup looks
            // like a leftover debugging aid — confirm it is intentional.
            dumpTree("dc=assemblade,dc=com", true, "(objectclass=*)");
            throw new StorageException(AssembladeErrorCode.ASB_0006);
        }
    } catch (DirectoryException e) {
        log.error("Exception thrown getting entry [" + dn + "]", e);
        throw new StorageException(AssembladeErrorCode.ASB_9999);
    }
}

From source file:org.chromium.content_shell.Shell.java

/**
 * Records {@code url} in the persisted browsing history, keeping at most 100
 * entries ordered oldest-first. Re-visiting a known URL moves it to the
 * most-recent position. The history is stored as a JSON array string under the
 * "history" preference key.
 *
 * @param url the URL to record as the most recently visited
 */
private void updateHistory(String url) {
    String json = mPref.getString("history", null);
    JSONArray array = new JSONArray();
    if (json != null) {
        try {
            array = new JSONArray(json);
        } catch (JSONException e) {
            // Corrupt stored history: log and start over with an empty one.
            e.printStackTrace();
        }
    }
    // LinkedHashSet preserves insertion order, so the oldest URL is first.
    LinkedHashSet<String> history = new LinkedHashSet<String>();
    for (int i = 0; i < array.length(); i++) {
        try {
            history.add(array.getString(i));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    // Remove-then-add moves an already-known URL to the most-recent position
    // (LinkedHashSet does not reorder on re-insertion). remove() is a no-op
    // when the URL is absent, so no contains() guard is needed.
    history.remove(url);
    history.add(url);
    if (history.size() > 100) {
        // Evict the oldest entry (iteration order == insertion order).
        history.remove(history.iterator().next());
    }
    array = new JSONArray();
    for (String u : history) {
        array.put(u);
    }

    mPref.edit().putString("history", array.toString()).commit();
}

From source file:com.streamsets.datacollector.definition.ConfigDefinitionExtractor.java

/**
 * Returns true if child creates a dependency with any member(s) of dependencyAncestors.
 * Also adds the stringified cycle to the cycles list
 *///from  w  ww .j  a va  2 s.co  m
/**
 * Returns true if child creates a dependency with any member(s) of dependencyAncestors.
 * Also adds the stringified cycle to the cycles list.
 *
 * @param dependencyAncestors ordered set of ancestors already on the dependency path
 * @param cycles              receives a "a -> b -> a" rendering of any detected cycle
 * @param child               the dependency being introduced
 * @return true if {@code child} already appears among its ancestors
 */
private boolean detectCycle(LinkedHashSet<String> dependencyAncestors, Set<String> cycles, final String child) {
    if (!dependencyAncestors.contains(child)) {
        return false;
    }
    // Locate the first occurrence of the child among its ancestors; the cycle
    // runs from that point to the end of the list, closed by the child itself.
    int firstSeenAt = 0;
    for (String ancestor : dependencyAncestors) {
        if (ancestor.equals(child)) {
            break;
        }
        firstSeenAt++;
    }
    cycles.add(Joiner.on(" -> ").join(Iterables.skip(dependencyAncestors, firstSeenAt)) + " -> " + child);
    return true;
}

From source file:org.fusesource.meshkeeper.distribution.remoting.AbstractRemotingClient.java

/**
 * Exports {@code obj} for remoting under the given multicast address.
 * When no service interfaces are supplied, the distributable interfaces are
 * discovered from the object's class; otherwise each supplied interface is
 * validated and used as-is.
 *
 * @param obj               the object to export
 * @param multicastAddress  the multicast address to export under
 * @param serviceInterfaces the interfaces to expose, or null/empty to auto-discover
 * @return the exported proxy for {@code obj}
 * @throws Exception if validation, discovery, or the export itself fails
 */
private final <T> T exportInternal(T obj, String multicastAddress, Class<?>... serviceInterfaces)
        throws Exception {
    LinkedHashSet<Class<?>> interfaces = new LinkedHashSet<Class<?>>();
    if (serviceInterfaces == null || serviceInterfaces.length == 0) {
        // No explicit interfaces: discover the distributable ones from the class.
        collectDistributableInterfaces(obj.getClass(), interfaces);
    } else {
        for (Class<?> serviceInterface : serviceInterfaces) {
            validateInterface(serviceInterface);
            interfaces.add(serviceInterface);
        }
    }

    // If the only interface is the Distributable marker itself, then we're
    // just trying to export the class:
    if (interfaces.isEmpty() || (interfaces.size() == 1 && interfaces.contains(Distributable.class))) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Exporting " + obj.getClass() + " with no service interfaces");
        }
        return (T) exportInterfaces(obj, multicastAddress, (Class<?>[]) null);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Exporting " + obj.getClass() + " as: " + interfaces);
    }

    Class<?>[] distributable = interfaces.toArray(new Class<?>[interfaces.size()]);
    return (T) exportInterfaces(obj, multicastAddress, distributable);
}

From source file:org.rapidcontext.core.type.WebService.java

/**
 * Returns the HTTP methods supported for the specified request.
 * The OPTIONS method is always supported and the HEAD method is
 * automatically added if GET is supported.
 *
 * @param request        the request to check
 *
 * @return the array of HTTP method names supported
 */
public String[] methods(Request request) {
    // LinkedHashSet de-duplicates method names while preserving insertion order.
    // Typed as <String> (the original used a raw set and cast the result array).
    LinkedHashSet<String> set = new LinkedHashSet<String>();
    set.add(METHOD.OPTIONS);
    set.addAll(Arrays.asList(methodsImpl(request)));
    for (int i = 0; i < matchers.size(); i++) {
        WebMatcher m = (WebMatcher) matchers.get(i);
        if (m.method() != null && m.match(request) > 0) {
            set.add(m.method());
        }
    }
    // HEAD is implied by GET support (same response without a body).
    if (set.contains(METHOD.GET)) {
        set.add(METHOD.HEAD);
    }
    return set.toArray(new String[set.size()]);
}

From source file:org.codehaus.mojo.jsimport.AbstractGenerateHtmlMojo.java

/**
 * Generate the file dependency properties and also populate a mapping of repository files to be copied into the
 * target folder.
 * 
 * @param sourceJsFolder the folder where the source js files reside.
 * @param mainSourceJsFolder the folder where the main source js files reside.
 * @param workFolder the folder where we can read/write work files that are durable between builds (and thus useful
 *            between builds).
 * @param mainWorkFolder as above but for a work folder belonging to the main scope.
 * @param localRepoFilesToCopy the mapping of source files that should be copied to the target folder
 * @param mainLocalRepoFilesToCopy as above but for repo files belonging to the main scope
 * @return the generated properties (key = source-relative js file path, value = a chain of
 *         script-src paths for filtering into HTML); empty if either source folder path is empty.
 * @throws MojoExecutionException if there is an execution failure.
 */
private Properties generateProperties(File sourceJsFolder, File mainSourceJsFolder, File workFolder,
        File mainWorkFolder, Map<String, String> localRepoFilesToCopy,
        Map<String, String> mainLocalRepoFilesToCopy) throws MojoExecutionException {
    Properties fileDependencyProperties = new Properties();

    String sourceFolderPath = sourceJsFolder.getAbsolutePath();
    if (sourceFolderPath.length() == 0) {
        return fileDependencyProperties;
    }
    String mainSourceFolderPath = mainSourceJsFolder.getAbsolutePath();
    if (mainSourceFolderPath.length() == 0) {
        return fileDependencyProperties;
    }

    LocalRepositoryCollector localRepositoryCollector = new LocalRepositoryCollector(project, localRepository,
            new File[] { new File(workFolder, "www-zip"), new File(mainWorkFolder, "www-zip") });

    for (Map.Entry<String, LinkedHashSet<String>> entry : fileDependencies.entrySet()) {
        String jsFile = entry.getKey();

        // Make our source files available for script elements.
        if (jsFile.startsWith(sourceFolderPath)) {
            // Build a new set of imports for this current js file and all
            // of its imports and their imports etc.
            Set<String> visitedNodes = new HashSet<String>();
            LinkedHashSet<String> allImports = new LinkedHashSet<String>();
            String cyclicFilePath = buildImportsRecursively(visitedNodes, entry.getValue(), allImports);
            // A file that (transitively) imports itself is also a cycle, even when
            // the recursive walk did not flag one.
            if (cyclicFilePath == null && allImports.contains(jsFile)) {
                cyclicFilePath = jsFile;
            }
            // Build a set of script statements for filtering into HTML
            // files.
            String closeOpenScriptDeclaration = "\"></script>\n<script type=\"text/javascript\" src=\"";

            StringBuilder propertyValue = new StringBuilder();
            for (String importFile : allImports) {
                // Make the file path relative.
                String relativeImportFile;
                if (importFile.startsWith(sourceFolderPath)) {
                    relativeImportFile = targetJsPath + importFile.substring(sourceFolderPath.length());
                } else if (importFile.startsWith(mainSourceFolderPath)) {
                    relativeImportFile = targetJsPath + importFile.substring(mainSourceFolderPath.length());
                } else {
                    // We don't appear to be looking at a project source file here so it must belong to one of our
                    // local repositories.
                    String localRepositoryPath = localRepositoryCollector.findLocalRepository(importFile);
                    if (localRepositoryPath != null) {
                        relativeImportFile = targetJsPath + importFile.substring(localRepositoryPath.length());
                        // Flag this file for copying as long as the file belongs to our scope. If the compile time
                        // dependencies are null then we are in compile scope. Otherwise we are in test scope, in
                        // which case we only copy the file if the dependency belongs to the test scope.
                        // NOTE(review): the "compileFileDependencies != null &&" test below is redundant
                        // (that branch is only reached when it is non-null) — confirm the intended scope logic.
                        if (compileFileDependencies == null || //
                                (compileFileDependencies != null && //
                                        !compileFileDependencies.containsKey(importFile))) {
                            localRepoFilesToCopy.put(importFile, relativeImportFile);
                        } else {
                            mainLocalRepoFilesToCopy.put(importFile, relativeImportFile);
                        }
                    } else {
                        throw new MojoExecutionException("Unexpected import file path "
                                + "(not project relative or local repo): " + importFile);
                    }
                }

                // We're now in a position of formatting the file path to the user in situations where we detected a
                // problem earlier.
                if (importFile.equals(cyclicFilePath)) {
                    throw new MojoExecutionException("Cyclic reference found in: " + relativeImportFile);
                }

                if (propertyValue.length() > 0) {
                    propertyValue.append(closeOpenScriptDeclaration);
                }
                propertyValue.append(relativeImportFile);
            }
            // Separator before the file's own script path, appended just below.
            if (propertyValue.length() > 0) {
                propertyValue.append(closeOpenScriptDeclaration);
            }

            // Properties are always site relative and expressed without a /js to make it simple for substitution.
            String propertyName;
            if (sourceFolderPath.length() > 1) {
                propertyName = jsFile.substring(sourceFolderPath.length() + 1);
            } else {
                propertyName = jsFile;
            }

            // Ensure that the properties are normalised to be OS independent.
            String normalisedPropertyName = propertyName.replace(File.separatorChar, '/');
            String normalisedPropertyValue = propertyValue.append(targetJsPath + "/" + normalisedPropertyName)
                    .toString() //
                    .replace(File.separatorChar, '/');

            // Finally, set the property.
            fileDependencyProperties.setProperty(normalisedPropertyName, normalisedPropertyValue);

            if (getLog().isDebugEnabled()) {
                getLog().debug("Generating script statements for files: " + allImports
                        + " relating to filter property: " + normalisedPropertyName);
            }

        }
    }

    return fileDependencyProperties;
}

From source file:ubic.gemma.loader.expression.geo.model.GeoValues.java

/**
 * Only needs to be called 'externally' if you know there is no data for the sample.
 * /* w w  w.  j  a  v a2 s.  com*/
 * @param sample
 * @param quantitationTypeIndex
 * @return
 */
private GeoPlatform addSample(GeoSample sample, Integer quantitationTypeIndex) {
    if (sample.getPlatforms().size() > 1) {
        throw new IllegalArgumentException(sample + ": Can't handle samples that use multiple platforms");
    }

    GeoPlatform platform = sample.getPlatforms().iterator().next();
    if (!sampleDimensions.containsKey(platform)) {
        sampleDimensions.put(platform, new HashMap<Object, LinkedHashSet<GeoSample>>());
    }

    Map<Object, LinkedHashSet<GeoSample>> samplePlatformMap = sampleDimensions.get(platform);
    if (!samplePlatformMap.containsKey(quantitationTypeIndex)) {
        samplePlatformMap.put(quantitationTypeIndex, new LinkedHashSet<GeoSample>());
    }

    LinkedHashSet<GeoSample> sampleQtMap = samplePlatformMap.get(quantitationTypeIndex);
    if (!sampleQtMap.contains(sample)) {
        sampleQtMap.add(sample);
    }
    return platform;
}

From source file:uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter.java

/**
 * Builds the Spark SQL schema for the configured groups.
 *
 * For each group, a per-group StructType is created: entity groups get a vertex
 * column, edge groups get source and destination columns, and every property of
 * a recognised (or converter-handled) type gets its own field. The per-group
 * schemas are then checked for consistency (a field appearing in several groups
 * must have a single definition) and merged into one overall StructType, with a
 * leading group column, preserving the order in which groups were provided.
 */
private void buildSchema() {
    LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ','));
    for (final String group : groups) {
        final SchemaElementDefinition elementDefn = schema.getElement(group);
        final List<StructField> structFieldList = new ArrayList<>();
        if (elementDefn instanceof SchemaEntityDefinition) {
            entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY);
            final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn;
            final String vertexClass = schema.getType(entityDefinition.getVertex()).getClassString();
            final DataType vertexType = getType(vertexClass);
            if (vertexType == null) {
                throw new RuntimeException("Vertex must be a recognised type: found " + vertexClass);
            }
            LOGGER.info("Group {} is an entity group - {} is of type {}", group, VERTEX_COL_NAME, vertexType);
            structFieldList.add(new StructField(VERTEX_COL_NAME, vertexType, true, Metadata.empty()));
        } else {
            entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE);
            final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn;
            final String srcClass = schema.getType(edgeDefinition.getSource()).getClassString();
            final String dstClass = schema.getType(edgeDefinition.getDestination()).getClassString();
            final DataType srcType = getType(srcClass);
            final DataType dstType = getType(dstClass);
            if (srcType == null || dstType == null) {
                throw new RuntimeException("Both source and destination must be recognised types: source was "
                        + srcClass + " destination was " + dstClass);
            }
            LOGGER.info("Group {} is an edge group - {} is of type {}, {} is of type {}", group, SRC_COL_NAME,
                    srcType, DST_COL_NAME, dstType);
            structFieldList.add(new StructField(SRC_COL_NAME, srcType, true, Metadata.empty()));
            structFieldList.add(new StructField(DST_COL_NAME, dstType, true, Metadata.empty()));
        }
        final Set<String> properties = elementDefn.getProperties();
        for (final String property : properties) {
            // Check if property is of a known type that can be handled by default
            final String propertyClass = elementDefn.getPropertyClass(property).getCanonicalName();
            DataType propertyType = getType(propertyClass);
            if (propertyType != null) {
                propertyNeedsConversion.put(property, needsConversion(propertyClass));
                structFieldList.add(new StructField(property, propertyType, true, Metadata.empty()));
                LOGGER.info("Property {} is of type {}", property, propertyType);
            } else {
                // Check if any of the provided converters can handle it
                if (converters != null) {
                    // First converter that can handle the property's class wins.
                    for (final Converter converter : converters) {
                        if (converter.canHandle(elementDefn.getPropertyClass(property))) {
                            propertyNeedsConversion.put(property, true);
                            propertyType = converter.convertedType();
                            converterByProperty.put(property, converter);
                            structFieldList
                                    .add(new StructField(property, propertyType, true, Metadata.empty()));
                            LOGGER.info("Property {} of type {} will be converted by {} to {}", property,
                                    propertyClass, converter.getClass().getName(), propertyType);
                            break;
                        }
                    }
                    if (propertyType == null) {
                        // Unhandled property: dropped from the schema with a warning.
                        LOGGER.warn(
                                "Ignoring property {} as it is not a recognised type and none of the provided "
                                        + "converters can handle it",
                                property);
                    }
                }
            }
        }
        structTypeByGroup.put(group,
                new StructType(structFieldList.toArray(new StructField[structFieldList.size()])));
    }
    // Create reverse map of field name to StructField
    final Map<String, Set<StructField>> fieldToStructs = new HashMap<>();
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            if (fieldToStructs.get(field) == null) {
                fieldToStructs.put(field, new HashSet<StructField>());
            }
            fieldToStructs.get(field).add(groupSchema.apply(field));
        }
    }
    // Check consistency, i.e. if the same field appears in multiple groups then the types are consistent
    for (final Entry<String, Set<StructField>> entry : fieldToStructs.entrySet()) {
        final Set<StructField> schemas = entry.getValue();
        if (schemas.size() > 1) {
            throw new IllegalArgumentException("Inconsistent fields: the field " + entry.getKey()
                    + " has more than one definition: " + StringUtils.join(schemas, ','));
        }
    }
    // Merge schemas for groups together - fields should appear in the order the groups were provided
    final LinkedHashSet<StructField> fields = new LinkedHashSet<>();
    fields.add(new StructField(GROUP, DataTypes.StringType, false, Metadata.empty()));
    usedProperties.add(GROUP);
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            final StructField struct = groupSchema.apply(field);
            // Add struct to fields unless it has already been added
            if (!fields.contains(struct)) {
                fields.add(struct);
                usedProperties.add(field);
            }
        }
    }
    structType = new StructType(fields.toArray(new StructField[fields.size()]));
    LOGGER.info("Schema is {}", structType);
    LOGGER.debug("properties -> conversion: {}", StringUtils.join(propertyNeedsConversion.entrySet(), ','));
}

From source file:ch.puzzle.itc.mobiliar.presentation.propertyEdit.EditPropertyView.java

/**
 * Applies the currently selected property type (by {@code propertyTypeId}) to the
 * property descriptor: sets the type entity, encryption flag, and validation regex,
 * and merges the type's tags into the user-entered {@code propertyTagsString}
 * without duplicating tags already present. A {@code propertyTypeId} of 0 means
 * "custom type" and clears the type and validation instead.
 */
public void assignPropertyTypeId() {
    if (propertyTypes == null) {
        return;
    }
    if (propertyTypeId == 0) {
        // Custom type
        propertyDescriptor.setPropertyTypeEntity(null);
        propertyDescriptor.setValidationLogic(null);
        return;
    }
    for (PropertyTypeEntity t : propertyTypes) {
        if (t.getId() != null && t.getId().equals(propertyTypeId)) {
            propertyDescriptor.setPropertyTypeEntity(t);
            propertyDescriptor.setEncrypt(t.isEncrypt());
            propertyDescriptor.setValidationLogic(t.getValidationRegex());

            // Merge the type's tags into the comma-separated tag string.
            StringBuilder newTagsSb = new StringBuilder();
            LinkedHashSet<String> existingTags = new LinkedHashSet<>();

            if (!StringUtils.isEmpty(propertyTagsString)) {
                newTagsSb.append(propertyTagsString);
                for (String tag : propertyTagsString.split(",")) {
                    existingTags.add(tag);
                }
            }
            for (PropertyTagEntity tag : t.getPropertyTags()) {
                if (!existingTags.contains(tag.getName())) {
                    // BUG FIX: the original appended the first new tag directly onto a
                    // non-empty tag string without a separator ("a,b" + "c," -> "a,bc,").
                    if (newTagsSb.length() > 0 && newTagsSb.charAt(newTagsSb.length() - 1) != ',') {
                        newTagsSb.append(',');
                    }
                    newTagsSb.append(tag.getName()).append(",");
                }
            }
            propertyTagsString = newTagsSb.toString();
        }
    }
}

From source file:org.apache.marmotta.platform.security.services.SecurityServiceImpl.java

/**
 * Loads the security profile with the given name from the classpath (resource
 * {@code security-profile.<profile>.properties}) and recursively merges it on top
 * of its base profile when one is declared via {@code security.profile.base}.
 *
 * @param profile  name of the profile to load
 * @param profiles profiles already visited on this resolution path, used to detect
 *                 cycles in the base-profile chain (a cycle is logged and broken)
 * @return the merged configuration, or null if the profile resource does not exist
 *         or cannot be parsed
 */
private Configuration loadProfile(String profile, LinkedHashSet<String> profiles) {
    URL securityConfigUrl = this.getClass().getClassLoader()
            .getResource("security-profile." + profile + ".properties");
    if (securityConfigUrl == null) {
        return null;
    }
    try {
        Configuration securityConfig = new PropertiesConfiguration(securityConfigUrl);

        if (!securityConfig.containsKey("security.profile.base")) {
            return securityConfig;
        }

        final String baseP = securityConfig.getString("security.profile.base");
        if (profiles.contains(baseP)) {
            // Inheritance cycle: stop recursing and use this profile as-is.
            log.warn("Cycle in security configuration detected: {} -> {}", profiles, baseP);
            return securityConfig;
        }
        profiles.add(baseP);
        final Configuration baseProfile = loadProfile(baseP, profiles);

        // This profile's values override those inherited from the base profile.
        // NOTE(review): baseProfile is null when the declared base profile is
        // missing, which would NPE here — same latent issue as the original.
        for (Iterator<String> keys = securityConfig.getKeys(); keys.hasNext();) {
            String key = keys.next();

            baseProfile.setProperty(key, securityConfig.getProperty(key));
        }
        return baseProfile;
    } catch (ConfigurationException e) {
        // Pass the exception itself as the final argument so SLF4J also logs the
        // stack trace (the original logged only e.getMessage()).
        log.error("error parsing security-profile.{}.properties file at {}: {}", profile, securityConfigUrl,
                e.getMessage(), e);
    }
    return null;
}