Example usage for java.util LinkedHashSet size

List of usage examples for java.util LinkedHashSet size

Introduction

On this page you can find example usages of java.util.LinkedHashSet.size().

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
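
A minimal, self-contained sketch of the behavior described above: size() reports the set's cardinality, so duplicate insertions do not inflate the count.

import java.util.LinkedHashSet;

public class SizeDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.add("alpha");
        set.add("beta");
        set.add("alpha"); // duplicate insertion is silently ignored
        // Prints 2: size() counts distinct elements, not add() calls
        System.out.println(set.size());
    }
}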

Usage

From source file:org.loklak.geo.GeoNames.java

public LinkedHashSet<String> suggest(String q, int count, int distance) {
    TreeMap<Long, String> a = new TreeMap<>();
    String ql = normalize(q);
    boolean exact = false;
    String exactTerm = null;
    seekloop: for (GeoLocation g : id2loc.values()) {
        termloop: for (String n : g.getNames()) {
            if (n.length() > 3 && n.length() < ql.length() * 4) {
                String nn = normalize(n);
                if (!exact && nn.equals(ql)) {
                    exact = true;
                    exactTerm = n;
                    continue seekloop;
                }
                // starts-with:
                if (nn.startsWith(ql)) {
                    a.put(g.getPopulation() + a.size(), n);
                    if (a.size() > count * 2)
                        break seekloop;
                }
                // distance

                if (nn.length() == ql.length()) {
                    int errorcount = 0;
                    for (int i = 0; i < nn.length(); i++) {
                        if (nn.charAt(i) != ql.charAt(i)) {
                            errorcount++;
                            if (errorcount > distance)
                                continue termloop;
                        }
                    }
                    a.put(g.getPopulation() + a.size(), n);
                    if (a.size() > count * 2)
                        break seekloop;
                }
            }
        }
    }
    // order by population
    LinkedHashSet<String> list = new LinkedHashSet<>();
    if (exact) {
        list.add(exactTerm);
    }
    for (Long p : a.descendingKeySet()) {
        list.add(a.get(p));
        // cap the suggestion list at the requested count
        if (list.size() >= count)
            break;
    }
    return list;
}
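
Note the keying trick above: a.put(g.getPopulation() + a.size(), n) adds the map's current size to the population so that two locations with equal population do not collide on the same TreeMap key. A standalone sketch of that idiom, with hypothetical place names:

import java.util.TreeMap;

public class TieBreakDemo {
    public static void main(String[] args) {
        TreeMap<Long, String> ranked = new TreeMap<>();
        // Both entries have population 500; adding ranked.size() keeps the keys distinct
        ranked.put(500L + ranked.size(), "Springfield");
        ranked.put(500L + ranked.size(), "Shelbyville");
        System.out.println(ranked); // {500=Springfield, 501=Shelbyville}
    }
}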

From source file:org.rapidcontext.core.type.WebService.java

/**
 * Returns the HTTP methods supported for the specified request.
 * The OPTIONS method is always supported and the HEAD method is
 * automatically added if GET is supported.
 *
 * @param request        the request to check
 *
 * @return the array of HTTP method names supported
 */
public String[] methods(Request request) {
    LinkedHashSet<String> set = new LinkedHashSet<String>();
    set.add(METHOD.OPTIONS);
    set.addAll(Arrays.asList(methodsImpl(request)));
    for (int i = 0; i < matchers.size(); i++) {
        WebMatcher m = (WebMatcher) matchers.get(i);
        if (m.method() != null && m.match(request) > 0) {
            set.add(m.method());
        }
    }
    if (set.contains(METHOD.GET)) {
        set.add(METHOD.HEAD);
    }
    return set.toArray(new String[set.size()]);
}
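
The LinkedHashSet above keeps the methods in insertion order (OPTIONS first) while ignoring duplicates, and set.size() sizes the target array exactly in toArray. A minimal sketch of the same pattern, outside the RapidContext API:

import java.util.LinkedHashSet;

public class MethodsDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<String>();
        set.add("OPTIONS");
        set.add("GET");
        set.add("OPTIONS"); // re-adding changes neither position nor size
        if (set.contains("GET")) {
            set.add("HEAD");
        }
        // size() sizes the array exactly, so there are no trailing nulls
        String[] methods = set.toArray(new String[set.size()]);
        System.out.println(String.join(", ", methods)); // OPTIONS, GET, HEAD
    }
}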

From source file:org.sakaiproject.nakamura.search.processors.GroupMembersSearchPropertyProvider.java

/**
 * {@inheritDoc}
 *
 * @see org.sakaiproject.nakamura.api.search.SearchPropertyProvider#loadUserProperties(org.apache.sling.api.SlingHttpServletRequest,
 *      java.util.Map)
 */
public void loadUserProperties(SlingHttpServletRequest request, Map<String, String> propertiesMap) {
    try {
        Session session = request.getResourceResolver().adaptTo(Session.class);
        UserManager um = AccessControlUtil.getUserManager(session);

        if (request.getParameter("q") == null) {
            throw new IllegalArgumentException("Must provide 'q' parameter to use for search.");
        }

        // get the request group name
        String groupName = request.getParameter("group");
        if (groupName == null) {
            throw new IllegalArgumentException("Must provide group to search within.");
        }

        // get the authorizable associated to the requested group name
        Group group = (Group) um.getAuthorizable(groupName);
        if (group == null) {
            throw new IllegalArgumentException("Unable to find group [" + groupName + "]");
        }

        LinkedHashSet<String> memberIds = new LinkedHashSet<String>();

        // collect the declared members of the requested group
        addDeclaredMembers(memberIds, group);

        // get the managers group for the requested group and collect its members
        addDeclaredManagerMembers(memberIds, group);

        boolean includeSelf = Boolean.parseBoolean(request.getParameter("includeSelf"));
        String currentUser = request.getRemoteUser();
        if (!includeSelf) {
            memberIds.remove(currentUser);
        }

        // 900 is the number raydavis said we should split on. This can be tuned as needed.
        if (memberIds.size() > 900) {
            // more than the threshold; pass along for post processing
            request.setAttribute("memberIds", memberIds);
        } else {
            // update the query to filter before writing nodes
            String users = StringUtils.join(memberIds, "' or rep:userId='");
            propertiesMap.put("_groupQuery", "and (rep:userId='" + users + "')");
        }
    } catch (RepositoryException e) {
        logger.error(e.getMessage(), e);
    }
}
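
The size() check above branches on a tuning threshold: small member lists are inlined into the query string via a join, larger ones are deferred to post-processing. The join trick in isolation, using String.join in place of commons-lang StringUtils and made-up user IDs:

import java.util.Arrays;
import java.util.LinkedHashSet;

public class GroupQueryDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> memberIds = new LinkedHashSet<>(Arrays.asList("alice", "bob"));
        if (memberIds.size() > 900) {
            System.out.println("too many members; defer to post-processing");
        } else {
            String users = String.join("' or rep:userId='", memberIds);
            // Prints: and (rep:userId='alice' or rep:userId='bob')
            System.out.println("and (rep:userId='" + users + "')");
        }
    }
}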

From source file:com.act.biointerpretation.cofactorremoval.CofactorRemover.java

/**
 * Removes chemicals that appear in both the substrates and the products
 * (coenzymes) and removes duplicates within each category.
 * @param reaction The reaction being updated.
 */
private void findAndIsolateCoenzymesFromReaction(Reaction reaction) {
    // Build ordered sets of the substrates/products.
    LinkedHashSet<Long> substrates = new LinkedHashSet<>(Arrays.asList(reaction.getSubstrates()));
    LinkedHashSet<Long> products = new LinkedHashSet<>(Arrays.asList(reaction.getProducts()));

    // Compute the intersection between the sets.
    Set<Long> intersection = new HashSet<>(substrates);
    intersection.retainAll(products);

    // A - (A ∩ B) = A \ B (set difference)
    substrates.removeAll(intersection);
    products.removeAll(intersection);

    // Update the reaction with the new (ordered) substrates/products + coenzymes.
    reaction.setSubstrates(substrates.toArray(new Long[substrates.size()]));
    reaction.setProducts(products.toArray(new Long[products.size()]));

    // Keep any existing coenzymes, but don't use them when computing the difference--they might be there for a reason.
    intersection.addAll(Arrays.asList(reaction.getCoenzymes()));
    reaction.setCoenzymes(intersection.toArray(new Long[intersection.size()]));
}
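
The intersection/difference idiom above works because retainAll and removeAll mutate the receiving set in place, and the LinkedHashSets keep the surviving substrates/products in their original order. The same steps in isolation, with made-up chemical IDs:

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

public class CoenzymeDemo {
    public static void main(String[] args) {
        LinkedHashSet<Long> substrates = new LinkedHashSet<>(Arrays.asList(1L, 2L, 3L));
        LinkedHashSet<Long> products = new LinkedHashSet<>(Arrays.asList(3L, 4L));

        Set<Long> intersection = new HashSet<>(substrates);
        intersection.retainAll(products); // [3]

        substrates.removeAll(intersection); // [1, 2], insertion order preserved
        products.removeAll(intersection);   // [4]

        System.out.println(substrates + " " + products + " " + intersection);
    }
}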

From source file:net.rim.ejde.internal.packaging.PackagingJob.java

@Override
public void run(IProgressMonitor monitor) throws CoreException {
    // remove the code signing error
    ResourceBuilderUtils.cleanProblemMarkers(ResourcesPlugin.getWorkspace().getRoot(),
            new String[] { IRIMMarker.SIGNATURE_TOOL_PROBLEM_MARKER }, IResource.DEPTH_ONE);
    // open the packaging console
    PackagingConsole.getInstance().activate();
    LinkedHashSet<BlackBerryProject> projectSet = ProjectUtils.getProjectsByBuildOrder(_projects);
    monitor.beginTask(IConstants.EMPTY_STRING, projectSet.size() * 10);
    monitor.subTask(Messages.PackagingJob_Name);
    boolean needSign = false;
    // collect projects which need to be signed
    LinkedHashSet<BlackBerryProject> projectsNeedSigning = new LinkedHashSet<BlackBerryProject>();
    // collect projects whose dependent projects need to be signed
    LinkedHashSet<BlackBerryProject> projectsDependencyNeedSigning = new LinkedHashSet<BlackBerryProject>();
    // collect projects which are packaged successfully
    LinkedHashSet<BlackBerryProject> succesfullyPackagedProjects = new LinkedHashSet<BlackBerryProject>();
    for (BlackBerryProject bbProject : projectSet) {
        // 1. run java build on the project
        if (!isBuildAutomaticallyOn()) {
            try {
                bbProject.getProject().build(IncrementalProjectBuilder.AUTO_BUILD,
                        new SubProgressMonitor(monitor, 1));
            } catch (CoreException e) {
                _log.error(e);
            }
        }
        monitor.worked(3);
        // 2. package the project
        if (!needPackaging(bbProject)) {
            if (needGenerateALXFile(bbProject)) {
                PackagingManager.generateALXForProject(bbProject);
            }
        } else {
            // remove the package problems
            ResourceBuilderUtils.cleanProblemMarkers(bbProject.getProject(),
                    new String[] { IRIMMarker.PACKAGING_PROBLEM }, IResource.DEPTH_INFINITE);
            try {
                PackagingManager.packageProject(bbProject);
                if (!needSign) {
                    needSign = true;
                }
            } catch (CoreException e) {
                _log.error(e.getMessage());
                try {
                    ResourceBuilderUtils.createProblemMarker(
                            e.getStatus().getCode() == DiagnosticFactory.CREATE_FOLDER_ERR_ID
                                    ? bbProject.getMetaFileHandler()
                                    : bbProject.getProject(),
                            IRIMMarker.PACKAGING_PROBLEM, e.getMessage(), -1, IMarker.SEVERITY_ERROR);
                } catch (Exception e1) {
                    _log.error(e1.getMessage());
                }
            }
            PackagingJob.setBuiltByJavaBuilders(bbProject.getProject(), false);
        }
        monitor.worked(4);
        // 3. run post-build command
        runPostBuild(bbProject);
        monitor.worked(1);
        // 4. check if the project needs to be signed or not
        if (!hasPackagingProblems(bbProject.getProject())) {
            succesfullyPackagedProjects.add(bbProject);
            if (PackagingUtils.isSigningNeeded(bbProject)) {
                projectsNeedSigning.add(bbProject);
            } else {
                if (PackagingUtils.isSigningNeededForDependency(bbProject)) {
                    projectsDependencyNeedSigning.add(bbProject);
                } else {
                    // if a project and its dependent projects do not need to be signed, copy the cod files to the web folder
                    // copy the cod files of dependency projects to the deployment folders
                    copyDependencyDeploymentFiles(bbProject);
                    // copy files from "Standard" to "Web"
                    copyToWebDeploymentFolder(bbProject);
                }
            }
        }
        monitor.worked(2);
        if (monitor.isCanceled()) {
            monitor.done();
            return;
        }
    }
    // Code signing
    switch (_signingFlag) {
    case SIGN_FORCE: {
        if (!succesfullyPackagedProjects.isEmpty()) {
            signCodFile(succesfullyPackagedProjects, monitor);
        }
        break;
    }
    case SIGN_IF_PROTECTED_API_USED: {
        if (!projectsNeedSigning.isEmpty()) {
            signCodFile(projectsNeedSigning, monitor);
            for (BlackBerryProject project : projectsDependencyNeedSigning) {
                // copy the cod files of dependency projects to the deployment folders
                copyDependencyDeploymentFiles(project);
                // copy files from "Standard" to "Web"
                copyToWebDeploymentFolder(project);
            }
        }
        break;
    }
    case SIGN_IF_NECESSARY: {
        if (needSign) {
            if (!projectsNeedSigning.isEmpty()) {
                signCodFile(projectsNeedSigning, monitor);
                for (BlackBerryProject project : projectsDependencyNeedSigning) {
                    // copy the cod files of dependency projects to the deployment folders
                    copyDependencyDeploymentFiles(project);
                    // copy files from "Standard" to "Web"
                    copyToWebDeploymentFolder(project);
                }
            }
        }
        break;
    }
    }
    monitor.done();
    return;
}

From source file:org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SimpleSQLReportDataFactory.java

public String[] getReferencedFields(final String query, final DataRow parameters)
        throws ReportDataFactoryException {

    final boolean isNewConnection = connection == null;
    try {
        final ParametrizationProviderFactory factory = createParametrizationProviderFactory();
        final Connection connection = getConnection(parameters);
        final ParametrizationProvider parametrizationProvider = factory.create(connection);
        final String computedQuery = computedQuery(query, parameters);
        parametrizationProvider.rewriteQueryForParametrization(connection, computedQuery, parameters);
        final LinkedHashSet<String> list = new LinkedHashSet<String>();
        list.addAll(Arrays.asList(parametrizationProvider.getPreparedParameterNames()));
        if (userField != null) {
            list.add(userField);
        }
        if (passwordField != null) {
            list.add(passwordField);
        }
        list.add(DataFactory.QUERY_LIMIT);
        return list.toArray(new String[list.size()]);
    } catch (ReportDataFactoryException e) {
        logger.warn("Unable to perform cache preparation", e);
        throw e;
    } catch (SQLException e) {
        logger.warn("Unable to perform cache preparation", e);
        throw new ReportDataFactoryException("Unable to perform cache preparation", e);
    } finally {
        if (isNewConnection) {
            close();
        }
    }
}

From source file:com.odoko.solrcli.actions.CrawlPostAction.java

/**
 * A very simple crawler, pulling URLs to fetch from a backlog and then
 * recurses N levels deep if recursive>0. Links are parsed from HTML
 * through first getting an XHTML version using SolrCell with extractOnly,
 * and followed if they are local. The crawler pauses for a default delay
 * of 10 seconds between each fetch; this can be configured via the delay
 * variable. This is only meant for test purposes, as it does not respect
 * robots or anything else fancy :)
 * @param level which level to crawl
 * @param out output stream to write to
 * @return number of pages crawled on this level and below
 */
protected int webCrawl(int level, OutputStream out) {
  int numPages = 0;
  LinkedHashSet<URL> stack = backlog.get(level);
  int rawStackSize = stack.size();
  stack.removeAll(visited);
  int stackSize = stack.size();
  LinkedHashSet<URL> subStack = new LinkedHashSet<URL>();
  info("Entering crawl at level "+level+" ("+rawStackSize+" links total, "+stackSize+" new)");
  for(URL u : stack) {
    try {
      visited.add(u);
      PageFetcherResult result = pageFetcher.readPageFromUrl(u);
      if(result.httpStatus == 200) {
        u = (result.redirectUrl != null) ? result.redirectUrl : u;
        URL postUrl = new URL(appendParam(solrUrl.toString(), 
            "literal.id="+URLEncoder.encode(u.toString(),"UTF-8") +
            "&literal.url="+URLEncoder.encode(u.toString(),"UTF-8")));
        boolean success = postData(new ByteArrayInputStream(result.content), null, out, result.contentType, postUrl);
        if (success) {
          info("POSTed web resource "+u+" (depth: "+level+")");
          Thread.sleep(delay * 1000);
          numPages++;
          // Pull links from HTML pages only
          if(recursive > level && result.contentType.equals("text/html")) {
            Set<URL> children = pageFetcher.getLinksFromWebPage(u, new ByteArrayInputStream(result.content), result.contentType, postUrl);
            subStack.addAll(children);
          }
        } else {
          warn("An error occurred while posting "+u);
        }
      } else {
        warn("The URL "+u+" returned a HTTP result status of "+result.httpStatus);
      }
    } catch (IOException e) {
      warn("Caught exception when trying to open connection to "+u+": "+e.getMessage());
    } catch (InterruptedException e) {
      throw new RuntimeException(e); // preserve the cause of the interruption
    }
  }
  if(!subStack.isEmpty()) {
    backlog.add(subStack);
    numPages += webCrawl(level+1, out);
  }
  return numPages;    
}
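
The bookkeeping at the top of webCrawl amounts to subtracting the visited set from the current level's frontier before fetching, comparing size() before and after to report how many links are new. A minimal sketch of that step, with placeholder links as plain strings (the real code tracks java.net.URL objects):

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

public class FrontierDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> stack = new LinkedHashSet<>(
                Arrays.asList("http://example.com/a", "http://example.com/b"));
        Set<String> visited = new HashSet<>();
        visited.add("http://example.com/a");

        int rawStackSize = stack.size();
        stack.removeAll(visited); // drop already-crawled links, keep discovery order
        System.out.println(rawStackSize + " links total, " + stack.size() + " new");
    }
}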

From source file:com.haulmont.cuba.core.app.RdbmsStore.java

@SuppressWarnings("unchecked")
protected <E extends Entity> List<E> getResultList(LoadContext<E> context, Query query,
        boolean ensureDistinct) {
    List<E> list = executeQuery(query, false);
    int initialSize = list.size();
    if (initialSize == 0) {
        return list;
    }
    boolean needApplyConstraints = needToApplyInMemoryReadConstraints(context);
    boolean filteredByConstraints = false;
    if (needApplyConstraints) {
        filteredByConstraints = security.filterByConstraints((Collection<Entity>) list);
    }
    if (!ensureDistinct) {
        return filteredByConstraints ? getResultListIteratively(context, query, list, initialSize, true) : list;
    }

    int requestedFirst = context.getQuery().getFirstResult();
    LinkedHashSet<E> set = new LinkedHashSet<>(list);
    if (set.size() == list.size() && requestedFirst == 0 && !filteredByConstraints) {
        // If this is the first chunk and it has no duplicates and security constraints are not applied, just return it
        return list;
    }
    // If this is not the first chunk, start filling the set from zero even if there
    // were no duplicates, to ensure correct paging
    return getResultListIteratively(context, query, set, initialSize, needApplyConstraints);
}
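
The set.size() == list.size() comparison above is a compact duplicate check: copying a list into a LinkedHashSet keeps the first occurrence of each element, so the sizes differ exactly when the list contained duplicates. The check in isolation:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class DistinctCheckDemo {
    public static void main(String[] args) {
        List<String> list = Arrays.asList("a", "b", "a");
        LinkedHashSet<String> set = new LinkedHashSet<>(list);
        boolean hasDuplicates = set.size() != list.size();
        System.out.println(hasDuplicates + " " + set); // true [a, b]
    }
}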

From source file:jef.tools.reflect.ClassEx.java

/**
 * Returns all interfaces declared by this class and its superclasses,
 * walking up the hierarchy until Object.
 * 
 * @return all implemented interfaces, in declaration order
 */
public Class<?>[] getAllInterfaces() {
    LinkedHashSet<Class<?>> intf = new LinkedHashSet<Class<?>>();
    Class<?> c = cls;
    while (c != Object.class) {
        for (Class<?> ic : c.getInterfaces()) {
            intf.add(ic);
        }
        c = c.getSuperclass();
    }
    return intf.toArray(new Class<?>[intf.size()]);
}

From source file:uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter.java

private void buildSchema() {
    LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ','));
    for (final String group : groups) {
        final SchemaElementDefinition elementDefn = schema.getElement(group);
        final List<StructField> structFieldList = new ArrayList<>();
        if (elementDefn instanceof SchemaEntityDefinition) {
            entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY);
            final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn;
            final String vertexClass = schema.getType(entityDefinition.getVertex()).getClassString();
            final DataType vertexType = getType(vertexClass);
            if (vertexType == null) {
                throw new RuntimeException("Vertex must be a recognised type: found " + vertexClass);
            }
            LOGGER.info("Group {} is an entity group - {} is of type {}", group, VERTEX_COL_NAME, vertexType);
            structFieldList.add(new StructField(VERTEX_COL_NAME, vertexType, true, Metadata.empty()));
        } else {
            entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE);
            final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn;
            final String srcClass = schema.getType(edgeDefinition.getSource()).getClassString();
            final String dstClass = schema.getType(edgeDefinition.getDestination()).getClassString();
            final DataType srcType = getType(srcClass);
            final DataType dstType = getType(dstClass);
            if (srcType == null || dstType == null) {
                throw new RuntimeException("Both source and destination must be recognised types: source was "
                        + srcClass + " destination was " + dstClass);
            }
            LOGGER.info("Group {} is an edge group - {} is of type {}, {} is of type {}", group, SRC_COL_NAME,
                    srcType, DST_COL_NAME, dstType);
            structFieldList.add(new StructField(SRC_COL_NAME, srcType, true, Metadata.empty()));
            structFieldList.add(new StructField(DST_COL_NAME, dstType, true, Metadata.empty()));
        }
        final Set<String> properties = elementDefn.getProperties();
        for (final String property : properties) {
            // Check if property is of a known type that can be handled by default
            final String propertyClass = elementDefn.getPropertyClass(property).getCanonicalName();
            DataType propertyType = getType(propertyClass);
            if (propertyType != null) {
                propertyNeedsConversion.put(property, needsConversion(propertyClass));
                structFieldList.add(new StructField(property, propertyType, true, Metadata.empty()));
                LOGGER.info("Property {} is of type {}", property, propertyType);
            } else {
                // Check if any of the provided converters can handle it
                if (converters != null) {
                    for (final Converter converter : converters) {
                        if (converter.canHandle(elementDefn.getPropertyClass(property))) {
                            propertyNeedsConversion.put(property, true);
                            propertyType = converter.convertedType();
                            converterByProperty.put(property, converter);
                            structFieldList
                                    .add(new StructField(property, propertyType, true, Metadata.empty()));
                            LOGGER.info("Property {} of type {} will be converted by {} to {}", property,
                                    propertyClass, converter.getClass().getName(), propertyType);
                            break;
                        }
                    }
                    if (propertyType == null) {
                        LOGGER.warn(
                                "Ignoring property {} as it is not a recognised type and none of the provided "
                                        + "converters can handle it",
                                property);
                    }
                }
            }
        }
        structTypeByGroup.put(group,
                new StructType(structFieldList.toArray(new StructField[structFieldList.size()])));
    }
    // Create reverse map of field name to StructField
    final Map<String, Set<StructField>> fieldToStructs = new HashMap<>();
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            if (fieldToStructs.get(field) == null) {
                fieldToStructs.put(field, new HashSet<StructField>());
            }
            fieldToStructs.get(field).add(groupSchema.apply(field));
        }
    }
    // Check consistency, i.e. if the same field appears in multiple groups then the types are consistent
    for (final Entry<String, Set<StructField>> entry : fieldToStructs.entrySet()) {
        final Set<StructField> schemas = entry.getValue();
        if (schemas.size() > 1) {
            throw new IllegalArgumentException("Inconsistent fields: the field " + entry.getKey()
                    + " has more than one definition: " + StringUtils.join(schemas, ','));
        }
    }
    // Merge schemas for groups together - fields should appear in the order the groups were provided
    final LinkedHashSet<StructField> fields = new LinkedHashSet<>();
    fields.add(new StructField(GROUP, DataTypes.StringType, false, Metadata.empty()));
    usedProperties.add(GROUP);
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            final StructField struct = groupSchema.apply(field);
            // Add struct to fields unless it has already been added
            if (!fields.contains(struct)) {
                fields.add(struct);
                usedProperties.add(field);
            }
        }
    }
    structType = new StructType(fields.toArray(new StructField[fields.size()]));
    LOGGER.info("Schema is {}", structType);
    LOGGER.debug("properties -> conversion: {}", StringUtils.join(propertyNeedsConversion.entrySet(), ','));
}