Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

On this page you can find usage examples for java.util.LinkedHashSet#add.

Prototype

boolean add(E e);

Documentation

Adds the specified element to this set if it is not already present (optional operation).
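
Before the project snippets below, here is a minimal, self-contained sketch (the class name LinkedHashSetAddDemo and the element values are illustrative only) showing the return value of add and the insertion-order iteration that distinguishes LinkedHashSet from HashSet:

import java.util.LinkedHashSet;

public class LinkedHashSetAddDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        System.out.println(set.add("alpha")); // true: "alpha" was not present
        System.out.println(set.add("beta"));  // true: "beta" was not present
        System.out.println(set.add("alpha")); // false: duplicate, set unchanged
        System.out.println(set);              // [alpha, beta] - iteration follows insertion order
    }
}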

Usage

From source file:org.zmlx.hg4idea.repo.HgRepositoryReader.java

@NotNull
public Map<String, LinkedHashSet<Hash>> readBranches() {
    Map<String, LinkedHashSet<Hash>> branchesWithHashes = new HashMap<>();
    // Set<String> branchNames = new HashSet<String>();
    if (isBranchInfoAvailable()) {
        Pattern activeBranchPattern = myStatusInBranchFile ? HASH_STATUS_NAME : HASH_NAME;
        String[] branchesWithHeads = DvcsUtil.tryLoadFileOrReturn(myBranchHeadsFile, "").split("\n");
        // the first line is the head revision (head hash + head number), so branch entries start at index 1
        for (int i = 1; i < branchesWithHeads.length; ++i) {
            Matcher matcher = activeBranchPattern.matcher(branchesWithHeads[i]);
            if (matcher.matches()) {
                String name = matcher.group(2);
                if (branchesWithHashes.containsKey(name)) {
                    branchesWithHashes.get(name).add(myVcsObjectsFactory.createHash(matcher.group(1)));
                } else {
                    LinkedHashSet<Hash> hashes = new LinkedHashSet<>();
                    hashes.add(myVcsObjectsFactory.createHash(matcher.group(1)));
                    branchesWithHashes.put(name, hashes);
                }
            }
        }
    }
    return branchesWithHashes;
}

From source file:org.apereo.portal.spring.security.RemoteUserSettingFilter.java

@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
        throws IOException, ServletException {
    final String remoteUser = StringUtils.trimToNull(FileUtils.readFileToString(this.remoteUserFile));

    if (remoteUser != null) {
        request = new HttpServletRequestWrapper((HttpServletRequest) request) {
            /* (non-Javadoc)
             * @see javax.servlet.http.HttpServletRequestWrapper#getRemoteUser()
             */
            @Override
            public String getRemoteUser() {
                return remoteUser;
            }

            /* (non-Javadoc)
             * @see javax.servlet.http.HttpServletRequestWrapper#getHeader(java.lang.String)
             */
            @Override
            public String getHeader(String name) {
                if ("REMOTE_USER".equals(name)) {
                    return remoteUser;
                }
                return super.getHeader(name);
            }

            /* (non-Javadoc)
             * @see javax.servlet.http.HttpServletRequestWrapper#getHeaders(java.lang.String)
             */
            @Override
            public Enumeration<String> getHeaders(String name) {
                if ("REMOTE_USER".equals(name)) {
                    return Iterators.asEnumeration(Collections.singleton(remoteUser).iterator());
                }
                return super.getHeaders(name);
            }

            /* (non-Javadoc)
             * @see javax.servlet.http.HttpServletRequestWrapper#getHeaderNames()
             */
            @Override
            public Enumeration<String> getHeaderNames() {
                final LinkedHashSet<String> headers = new LinkedHashSet<String>();
                for (final Enumeration<String> headersEnum = super.getHeaderNames(); headersEnum
                        .hasMoreElements();) {
                    headers.add(headersEnum.nextElement());
                }
                headers.add("REMOTE_USER");

                return Iterators.asEnumeration(headers.iterator());
            }

            /* (non-Javadoc)
             * @see javax.servlet.http.HttpServletRequestWrapper#getIntHeader(java.lang.String)
             */
            @Override
            public int getIntHeader(String name) {
                if ("REMOTE_USER".equals(name)) {
                    return Integer.valueOf(remoteUser);
                }
                return super.getIntHeader(name);
            }
        };
    }

    chain.doFilter(request, response);
}

From source file:com.asual.summer.core.resource.CompositeResource.java

public List<CompositeResource> getChildren() {
    if (children == null) {
        LinkedHashSet<CompositeResource> set = new LinkedHashSet<CompositeResource>();
        Map<String, CompositeResource> beans = BeanUtils.getBeansOfType(CompositeResource.class);
        for (CompositeResource resource : beans.values()) {
            if (this.equals(resource.getParent())) {
                set.add(resource);
            }
        }
        children = new ArrayList<CompositeResource>(set);
        OrderComparator.sort(children);
    }
    return children;
}

From source file:com.github.tomakehurst.wiremock.servlet.HttpServletRequestAdapter.java

@SuppressWarnings("unchecked")
@Override
public Set<String> getAllHeaderKeys() {
    LinkedHashSet<String> headerKeys = new LinkedHashSet<String>();
    for (Enumeration<String> headerNames = request.getHeaderNames(); headerNames.hasMoreElements();) {
        headerKeys.add(headerNames.nextElement());
    }

    return headerKeys;
}

From source file:org.codice.ddf.spatial.ogc.csw.catalog.transformer.GmdTransformer.java

private LinkedHashSet<Path> buildPaths() {
    LinkedHashSet<Path> paths = new LinkedHashSet<>();

    paths.add(new Path(GmdMetacardType.FILE_IDENTIFIER_PATH));

    Arrays.asList(GmdMetacardType.FILE_IDENTIFIER_PATH, GmdMetacardType.DATE_TIME_STAMP_PATH,
            GmdMetacardType.DATE_STAMP_PATH, GmdMetacardType.CODE_LIST_VALUE_PATH,
            GmdMetacardType.CRS_AUTHORITY_PATH, GmdMetacardType.CRS_VERSION_PATH, GmdMetacardType.CRS_CODE_PATH,
            GmdMetacardType.TITLE_PATH, GmdMetacardType.ABSTRACT_PATH, GmdMetacardType.FORMAT_PATH,
            GmdMetacardType.LINKAGE_URI_PATH, GmdMetacardType.KEYWORD_PATH, GmdMetacardType.TOPIC_CATEGORY_PATH,
            GmdMetacardType.BBOX_WEST_LON_PATH, GmdMetacardType.BBOX_EAST_LON_PATH,
            GmdMetacardType.BBOX_SOUTH_LAT_PATH, GmdMetacardType.BBOX_NORTH_LAT_PATH,
            GmdMetacardType.POINT_OF_CONTACT_PATH).forEach(path -> {
                paths.add(toPath(path));
            });

    return paths;
}

From source file:com.smartitengineering.version.impl.jgit.service.impl.MetaRCSServiceImpl.java

public Set<com.smartitengineering.version.api.Commit> searchForCommits(Collection<QueryParameter> parameters) {
    Collection<QueryParameter> params = parameters == null ? Collections.<QueryParameter>emptyList()
            : parameters;
    List<Commit> commits = commitReader.getList(params.toArray(new QueryParameter[0]));
    LinkedHashSet<com.smartitengineering.version.api.Commit> result = new LinkedHashSet<com.smartitengineering.version.api.Commit>();
    if (commits != null) {
        for (Commit commit : commits) {
            result.add(MetaFactory.transformMetaCommit(commit));
        }
    }
    return result;
}

From source file:org.springsource.ide.eclipse.boot.maven.analyzer.graph.DirectedGraph.java

public Collection<Object> getNodes() {
    final LinkedHashSet<Object> nodes = new LinkedHashSet<Object>();
    eachEdge(new EdgeAction() {
        @Override
        public void run(Object from, Object to) {
            nodes.add(from);
            nodes.add(to);
        }
    });
    return nodes;
}

From source file:uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter.java

private void buildSchema() {
    LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ','));
    for (final String group : groups) {
        final SchemaElementDefinition elementDefn = schema.getElement(group);
        final List<StructField> structFieldList = new ArrayList<>();
        if (elementDefn instanceof SchemaEntityDefinition) {
            entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY);
            final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn;
            final String vertexClass = schema.getType(entityDefinition.getVertex()).getClassString();
            final DataType vertexType = getType(vertexClass);
            if (vertexType == null) {
                throw new RuntimeException("Vertex must be a recognised type: found " + vertexClass);
            }
            LOGGER.info("Group {} is an entity group - {} is of type {}", group, VERTEX_COL_NAME, vertexType);
            structFieldList.add(new StructField(VERTEX_COL_NAME, vertexType, true, Metadata.empty()));
        } else {
            entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE);
            final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn;
            final String srcClass = schema.getType(edgeDefinition.getSource()).getClassString();
            final String dstClass = schema.getType(edgeDefinition.getDestination()).getClassString();
            final DataType srcType = getType(srcClass);
            final DataType dstType = getType(dstClass);
            if (srcType == null || dstType == null) {
                throw new RuntimeException("Both source and destination must be recognised types: source was "
                        + srcClass + " destination was " + dstClass);
            }
            LOGGER.info("Group {} is an edge group - {} is of type {}, {} is of type {}", group, SRC_COL_NAME,
                    srcType, DST_COL_NAME, dstType);
            structFieldList.add(new StructField(SRC_COL_NAME, srcType, true, Metadata.empty()));
            structFieldList.add(new StructField(DST_COL_NAME, dstType, true, Metadata.empty()));
        }
        final Set<String> properties = elementDefn.getProperties();
        for (final String property : properties) {
            // Check if property is of a known type that can be handled by default
            final String propertyClass = elementDefn.getPropertyClass(property).getCanonicalName();
            DataType propertyType = getType(propertyClass);
            if (propertyType != null) {
                propertyNeedsConversion.put(property, needsConversion(propertyClass));
                structFieldList.add(new StructField(property, propertyType, true, Metadata.empty()));
                LOGGER.info("Property {} is of type {}", property, propertyType);
            } else {
                // Check if any of the provided converters can handle it
                if (converters != null) {
                    for (final Converter converter : converters) {
                        if (converter.canHandle(elementDefn.getPropertyClass(property))) {
                            propertyNeedsConversion.put(property, true);
                            propertyType = converter.convertedType();
                            converterByProperty.put(property, converter);
                            structFieldList
                                    .add(new StructField(property, propertyType, true, Metadata.empty()));
                            LOGGER.info("Property {} of type {} will be converted by {} to {}", property,
                                    propertyClass, converter.getClass().getName(), propertyType);
                            break;
                        }
                    }
                    if (propertyType == null) {
                        LOGGER.warn(
                                "Ignoring property {} as it is not a recognised type and none of the provided "
                                        + "converters can handle it",
                                property);
                    }
                }
            }
        }
        structTypeByGroup.put(group,
                new StructType(structFieldList.toArray(new StructField[structFieldList.size()])));
    }
    // Create reverse map of field name to StructField
    final Map<String, Set<StructField>> fieldToStructs = new HashMap<>();
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            if (fieldToStructs.get(field) == null) {
                fieldToStructs.put(field, new HashSet<StructField>());
            }
            fieldToStructs.get(field).add(groupSchema.apply(field));
        }
    }
    // Check consistency, i.e. if the same field appears in multiple groups then the types are consistent
    for (final Entry<String, Set<StructField>> entry : fieldToStructs.entrySet()) {
        final Set<StructField> schemas = entry.getValue();
        if (schemas.size() > 1) {
            throw new IllegalArgumentException("Inconsistent fields: the field " + entry.getKey()
                    + " has more than one definition: " + StringUtils.join(schemas, ','));
        }
    }
    // Merge schemas for groups together - fields should appear in the order the groups were provided
    final LinkedHashSet<StructField> fields = new LinkedHashSet<>();
    fields.add(new StructField(GROUP, DataTypes.StringType, false, Metadata.empty()));
    usedProperties.add(GROUP);
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            final StructField struct = groupSchema.apply(field);
            // Add struct to fields unless it has already been added
            if (!fields.contains(struct)) {
                fields.add(struct);
                usedProperties.add(field);
            }
        }
    }
    structType = new StructType(fields.toArray(new StructField[fields.size()]));
    LOGGER.info("Schema is {}", structType);
    LOGGER.debug("properties -> conversion: {}", StringUtils.join(propertyNeedsConversion.entrySet(), ','));
}

From source file:org.rapidcontext.core.type.WebService.java

/**
 * Returns the HTTP methods supported for the specified request.
 * The OPTIONS method is always supported and the HEAD method is
 * automatically added if GET is supported.
 *
 * @param request        the request to check
 *
 * @return the array of HTTP method names supported
 */
public String[] methods(Request request) {
    LinkedHashSet set = new LinkedHashSet();
    set.add(METHOD.OPTIONS);
    set.addAll(Arrays.asList(methodsImpl(request)));
    for (int i = 0; i < matchers.size(); i++) {
        WebMatcher m = (WebMatcher) matchers.get(i);
        if (m.method() != null && m.match(request) > 0) {
            set.add(m.method());
        }
    }
    if (set.contains(METHOD.GET)) {
        set.add(METHOD.HEAD);
    }
    return (String[]) set.toArray(new String[set.size()]);
}

From source file:com.smartitengineering.version.impl.jgit.service.impl.MetaRCSServiceImpl.java

public Set<com.smartitengineering.version.api.Revision> searchForRevisions(
        Collection<QueryParameter> parameters) {
    Collection<QueryParameter> params = parameters == null ? Collections.<QueryParameter>emptyList()
            : parameters;
    List<Revision> revisions = revisionReader.getList(params.toArray(new QueryParameter[0]));
    LinkedHashSet<com.smartitengineering.version.api.Revision> result = new LinkedHashSet<com.smartitengineering.version.api.Revision>();
    if (revisions != null) {
        for (Revision revision : revisions) {
            result.add(MetaFactory.transformMetaRevision(revision));
        }
    }
    return result;
}