Example usage for java.util HashSet contains

List of usage examples for java.util HashSet contains

Introduction

On this page you can find usage examples for java.util.HashSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
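
The method takes a single argument of any object type and performs an (on average) constant-time hash lookup, using the elements' hashCode() and equals() methods to decide membership. The snippet below is a minimal, self-contained sketch (not taken from any of the projects listed further down) illustrating the basic check and the check-before-add pattern that recurs in the usage examples:

import java.util.HashSet;

public class HashSetContainsExample {
    public static void main(String[] args) {
        HashSet<String> seen = new HashSet<>();
        seen.add("alpha");
        seen.add("beta");

        // contains() returns true only if an equal element is already in the set
        System.out.println(seen.contains("alpha")); // true
        System.out.println(seen.contains("gamma")); // false

        // common pattern: only process an element the first time it appears
        String candidate = "gamma";
        if (!seen.contains(candidate)) {
            seen.add(candidate);
            System.out.println("processed " + candidate);
        }
    }
}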

Usage

From source file:org.walkmod.conf.providers.yml.RemoveChainsYMLAction.java

@Override
public void doAction(JsonNode node) throws Exception {
    HashSet<String> chainsSet = new HashSet<String>(chains);
    ArrayNode chainsList = null;
    ObjectMapper mapper = provider.getObjectMapper();

    if (node.has("chains")) {
        JsonNode aux = node.get("chains");
        if (aux.isArray()) {
            chainsList = (ArrayNode) node.get("chains");
            Iterator<JsonNode> it = chainsList.iterator();
            ArrayNode newChainsList = new ArrayNode(mapper.getNodeFactory());
            while (it.hasNext()) {
                JsonNode next = it.next();
                if (next.isObject()) {
                    String type = next.get("name").asText();
                    if (!chainsSet.contains(type)) {
                        newChainsList.add(next);
                    }
                }
            }
            ObjectNode oNode = (ObjectNode) node;
            if (newChainsList.size() > 0) {
                oNode.set("chains", newChainsList);
            } else {
                oNode.remove("chains");
            }
            provider.write(node);
        }
    } else if (node.has("transformations") && chainsSet.contains("default")) {
        ObjectNode oNode = (ObjectNode) node;
        oNode.remove("transformations");
        provider.write(node);
    }

}

From source file:com.pinterest.arcee.autoscaling.AwsAutoScaleGroupManager.java

@Override
public AutoScalingGroupBean getAutoScalingGroupInfoByName(String groupName) throws Exception {
    AutoScalingGroupBean asgInfo = generateDefaultASGInfo();
    DescribeAutoScalingGroupsRequest request = new DescribeAutoScalingGroupsRequest();
    request.setAutoScalingGroupNames(Arrays.asList(groupName));
    DescribeAutoScalingGroupsResult result = aasClient.describeAutoScalingGroups(request);
    List<AutoScalingGroup> asgroups = result.getAutoScalingGroups();

    if (asgroups.isEmpty()) {
        return asgInfo;
    }

    AutoScalingGroup asgroup = asgroups.get(0);
    // set autoscaling group status
    List<SuspendedProcess> suspendedProcesses = asgroup.getSuspendedProcesses();
    HashSet<String> processName = new HashSet<>();
    for (SuspendedProcess process : suspendedProcesses) {
        processName.add(process.getProcessName());
    }
    if (processName.contains(PROCESS_ALARMNOTIFICATION) && processName.contains(PROCESS_SCHEDULEDACTIONS)) {
        asgInfo.setStatus(ASGStatus.DISABLED);
    } else {
        asgInfo.setStatus(ASGStatus.ENABLED);
    }

    asgInfo.setMinSize(asgroup.getMinSize());
    asgInfo.setMaxSize(asgroup.getMaxSize());
    // TODO: it is dangerous that we reuse the same value for TerminationPolicy
    String policy = asgroup.getTerminationPolicies().isEmpty() ? "Default"
            : new String(asgroup.getTerminationPolicies().get(0).getBytes());
    asgInfo.setTerminationPolicy(
            AutoScalingTerminationPolicy.valueOf(AutoScalingTerminationPolicy.class, policy));

    List<Instance> instances = asgroup.getInstances();
    for (Instance instance : instances) {
        if (instance.getInstanceId() != null) {
            asgInfo.addToInstances(instance.getInstanceId());
        }
    }
    return asgInfo;
}

From source file:importer.handler.post.stages.Splitter.java

/**
 * Percolate the versions accumulated in root to suitable sub-elements
 * @param elem the start node with its versions to percolate
 */
private void percolateDown(Element elem) {
    Node parent = elem.getParentNode();
    if (parent != null && parent.getNodeType() == Node.ELEMENT_NODE) {
        System.out.println(elem.getNodeName());
        String vers = ((Element) parent).getAttribute(VERSIONS);
        if (vers != null && vers.length() > 0) {
            if (!discriminator.isSibling(elem)) {
                Discriminator.addVersion(elem, vers);
                addDoneTag(elem);
            } else if (elem.hasAttribute(FINAL)) {
                String fVers = elem.getAttribute(FINAL);
                if (fVers != null && fVers.length() > 0) {
                    // find inverse versions
                    HashSet<String> invVers = new HashSet<String>();
                    String[] parts = vers.split(" ");
                    String[] iparts = fVers.split(" ");
                    for (int i = 0; i < parts.length; i++)
                        if ( /*!parts[i].startsWith(DEL) 
                             &&*/ !parts[i].equals(BASE))
                            invVers.add(parts[i]);
                    for (int i = 0; i < iparts.length; i++)
                        if (invVers.contains(iparts[i]))
                            invVers.remove(iparts[i]);
                    String newVers = hashsetToString(invVers);
                    Discriminator.addVersion(elem, newVers);
                    addDoneTag(elem);
                    Element lastOChild = discriminator.lastOpenChild(elem);
                    while (lastOChild != null) {
                        Discriminator.addVersion(lastOChild, newVers);
                        lastOChild = discriminator.lastOpenChild(lastOChild);
                    }
                }
            }
            // else ignore it
        }
    }
    // now examine the children of elem
    Element child = Discriminator.firstChild(elem);
    while (child != null && !isDone(child)) {
        percolateDown(child);
        child = Discriminator.firstChild(child);
    }
    // finally the siblings of elem
    Element brother = Discriminator.nextSibling(elem, true);
    while (brother != null) {
        if (!isDone(brother))
            percolateDown(brother);
        brother = Discriminator.nextSibling(brother, true);
    }
}

From source file:com.gargoylesoftware.htmlunit.javascript.host.dom.Range.java

/**
 * Returns the deepest common ancestor container of the Range's two boundary points.
 * @return the deepest common ancestor container of the Range's two boundary points
 */
@JsxGetter
public Object getCommonAncestorContainer() {
    final HashSet<Node> startAncestors = new HashSet<>();
    Node ancestor = startContainer_;
    while (ancestor != null) {
        startAncestors.add(ancestor);
        ancestor = ancestor.getParent();
    }

    ancestor = endContainer_;
    while (ancestor != null) {
        if (startAncestors.contains(ancestor)) {
            return ancestor;
        }
        ancestor = ancestor.getParent();
    }

    return Context.getUndefinedValue();
}

From source file:ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.java

private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
    FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);

    List<Long> pids = null;

    /*
     * Handle textual params
     */
    /*
    for (String nextParamName : theParams.keySet()) {
       for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
    for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
       IQueryParameterType nextParam = orIterator.next();
       if (nextParam instanceof TokenParam) {
          TokenParam nextTokenParam = (TokenParam) nextParam;
          if (nextTokenParam.isText()) {
             orIterator.remove();
             QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
             BooleanJunction<?> bool = qb.bool();
            
             bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
             if (isNotBlank(theResourceName)) {
                bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
             }
    //                     
             //@formatter:off
             String value = nextTokenParam.getValue().toLowerCase();
             bool.must(qb.keyword().onField("myValueTextEdgeNGram").matching(value).createQuery());
                     
             //@formatter:on
                     
             FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceIndexedSearchParamString.class);
            
             List<?> resultList = ftq.getResultList();
             pids = new ArrayList<Long>();
             for (Object next : resultList) {
                ResourceIndexedSearchParamString nextAsArray = (ResourceIndexedSearchParamString) next;
                pids.add(nextAsArray.getResourcePid());
             }
          }
       }
    }
       }
    }
            
    if (pids != null && pids.isEmpty()) {
       return pids;
    }
    */

    QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
    BooleanJunction<?> bool = qb.bool();

    /*
     * Handle _content parameter (resource body content)
     */
    List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
    addTextSearch(qb, bool, contentAndTerms, "myContentText", "myContentTextEdgeNGram", "myContentTextNGram");

    /*
     * Handle _text parameter (resource narrative content)
     */
    List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
    addTextSearch(qb, bool, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram",
            "myNarrativeTextNGram");

    if (theReferencingPid != null) {
        bool.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(theReferencingPid)
                .createQuery());
    }

    if (bool.isEmpty()) {
        return pids;
    }

    if (isNotBlank(theResourceName)) {
        bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
    }

    Query luceneQuery = bool.createQuery();

    // wrap Lucene query in a javax.persistence.Query
    FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
    jpaQuery.setProjection("myId");

    // execute search
    List<?> result = jpaQuery.getResultList();

    HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;

    ArrayList<Long> retVal = new ArrayList<Long>();
    for (Object object : result) {
        Object[] nextArray = (Object[]) object;
        Long next = (Long) nextArray[0];
        if (next != null && (pidsSet == null || pidsSet.contains(next))) {
            retVal.add(next);
        }
    }

    return retVal;
}

From source file:compare.handler.CompareHandler.java

/**
 * Get an array of CorCodes, and their styles and formats too
 * @param docID the docID for the resource
 * @param version1 the group-path+version name
 * @param userCC an array of specified CorCode names for this docID
 * @param diffCC the CorCode of the diffs
 * @param styleNames an array of predefined style-names
 * @param styles an empty arraylist of style names to be filled
 * @return a simple array of CorCode texts in their corresponding formats
 */
protected String[] getCorCodes(String docID, String version1, String[] userCC, CorCode diffCC,
        String[] styleNames, ArrayList<String> styles) throws CompareException, CalliopeException {
    String[] ccTexts = new String[userCC.length + 1];
    // add diffCC entries to corcodes and formats but not styles
    ccTexts[0] = diffCC.toString();
    // load user-defined styles
    if (styleNames.length > 0) {
        String[] styleTexts = fetchStyles(styleNames);
        for (int i = 0; i < styleTexts.length; i++)
            styles.add(styleTexts[i]);
    }
    HashSet<String> found = new HashSet<String>();
    for (int i = 0; i < userCC.length; i++) {
        String ccResource = userCC[i];
        EcdosisVersion ev = doGetResourceVersion(Database.CORCODE, ccResource, version1);
        try {
            char[] versionText = ev.getVersion();
            if (versionText == null)
                throw new CompareException("version not found");
            ccTexts[i + 1] = new String(versionText);
            String style = ev.getStyle();
            if (!found.contains(style)) {
                styles.add(fetchStyle(style));
                found.add(style);
            }
        } catch (Exception e) {
            throw new CompareException(e);
        }
    }
    return ccTexts;
}

From source file:net.datenwerke.sandbox.util.SandboxParser.java

protected SandboxContext loadSandbox(String name, HierarchicalConfiguration conf,
        HierarchicalConfiguration contextConf, HashSet<String> basedOnProcessed) {
    SandboxContext context = new SandboxContext();
    String basedOn = contextConf.getString("[@basedOn]");
    if (null != basedOn) {
        if (basedOnProcessed.contains(basedOn))
            throw new IllegalStateException(
                    "Loop detected: there seems to be a loop in the sandbox configuration at" + basedOn
                            + " and " + name);
        basedOnProcessed.add(basedOn);

        HierarchicalConfiguration baseConf = null;
        for (HierarchicalConfiguration c : conf.configurationsAt("security.sandbox")) {
            if (basedOn.equals(c.getString("[@name]"))) {
                baseConf = c;
                break;
            }
        }
        if (null == baseConf)
            throw new IllegalStateException("Could not find config for " + basedOn);

        SandboxContext basis = loadSandbox(name, conf, baseConf, basedOnProcessed);
        context = basis.clone();
    }

    Boolean allAccess = contextConf.getBoolean("[@allowAll]", false);
    if (allAccess)
        context.setPassAll(allAccess);

    boolean bypassClassAccesss = contextConf.getBoolean("[@bypassClassAccess]", false);
    context.setBypassClassAccessChecks(bypassClassAccesss);

    boolean bypassPackageAccesss = contextConf.getBoolean("[@bypassPackageAccess]", false);
    context.setBypassPackageAccessChecks(bypassPackageAccesss);

    Boolean debug = contextConf.getBoolean("[@debug]", false);
    if (debug)
        context.setDebug(debug);

    String codesource = contextConf.getString("[@codesource]", null);
    if (null != codesource && !"".equals(codesource.trim()))
        context.setCodesource(codesource);

    /* run in */
    Boolean runInThread = contextConf.getBoolean("[@runInThread]", false);
    if (runInThread)
        context.setRunInThread(runInThread);
    Boolean runRemote = contextConf.getBoolean("[@runRemote]", false);
    if (runRemote)
        context.setRunRemote(runRemote);

    /* finalizers */
    Boolean removeFinalizers = contextConf.getBoolean("[@removeFinalizers]", false);
    if (removeFinalizers)
        context.setRemoveFinalizers(removeFinalizers);

    /* thread */
    configureThreadRestrictions(context, contextConf);

    /* packages */
    configurePackages(context, contextConf);

    /* class access */
    try {
        configureClasses(context, contextConf);

        /* application loader */
        configureClassesForApplicationLoader(context, contextConf);
    } catch (MalformedURLException e) {
        throw new IllegalArgumentException("Could not generate URL", e);
    }

    /* permissions */
    configurePermissions(context, contextConf);

    /* file access */
    configureFileAccess(context, contextConf);

    return context;
}

From source file:com.vmware.thinapp.vi.InventoryBrowser.java

/**
 * Return a list of VINodes containing VM or Folder VINode types for
 * the input resource pool, and each Folder recursively containing VINodes for
 * its children.
 *
 * @param pool the resource pool to load the VM subtree for
 * @return - null if no children exist.
 * @throws RemoteException
 */
public synchronized List<VINode> loadVMSubTreeForResourcePool(ResourcePool pool) throws RemoteException {
    long currentTime = (new Date()).getTime();
    if (currentTime - lastGCTime > CACHE_REFRESH_INTERVAL) {
        // clear all local caches to ensure we don't
        // cache a reused moid (take long time to happen).
        tafVMs.clear();
        othersVMs.clear();
        lastGCTime = currentTime;
    }
    HashSet<String> allVmIds = new HashSet<String>();
    List<VINode> vmList = loadVMSubTreeForResourcePool(pool, allVmIds);

    // GC on tafVMs
    Iterator<String> itr = tafVMs.iterator();
    while (itr.hasNext()) {
        String moid = itr.next();
        if (!allVmIds.contains(moid)) {
            itr.remove();
            log.trace("Removed moid {} from known TAF VM list.", moid);
        }
    }

    // GC on othersVMs
    itr = othersVMs.iterator();
    while (itr.hasNext()) {
        String moid = itr.next();
        if (!allVmIds.contains(moid)) {
            itr.remove();
            log.trace("Removed moid {} from known others VM list.", moid);
        }
    }

    return vmList;
}

From source file:blusunrize.immersiveengineering.client.render.TileRenderAutoWorkbench.java

public static BlueprintLines getBlueprintDrawable(ItemStack stack, World world) {
    if (stack.isEmpty())
        return null;
    EntityPlayer player = ClientUtils.mc().player;
    ArrayList<BufferedImage> images = new ArrayList<>();
    try {
        IBakedModel ibakedmodel = ClientUtils.mc().getRenderItem().getItemModelWithOverrides(stack, world,
                player);
        HashSet<String> textures = new HashSet<>();
        Collection<BakedQuad> quads = ibakedmodel.getQuads(null, null, 0);
        for (BakedQuad quad : quads)
            if (quad != null && quad.getSprite() != null)
                textures.add(quad.getSprite().getIconName());
        for (String s : textures) {
            ResourceLocation rl = new ResourceLocation(s);
            rl = new ResourceLocation(rl.getNamespace(),
                    String.format("%s/%s%s", "textures", rl.getPath(), ".png"));
            IResource resource = ClientUtils.mc().getResourceManager().getResource(rl);
            BufferedImage bufferedImage = TextureUtil.readBufferedImage(resource.getInputStream());
            if (bufferedImage != null)
                images.add(bufferedImage);
        }
    } catch (Exception e) {
    }
    if (images.isEmpty())
        return null;
    ArrayList<Pair<TexturePoint, TexturePoint>> lines = new ArrayList<>();
    HashSet<TexturePoint> testSet = new HashSet<>();
    HashMultimap<Integer, TexturePoint> area = HashMultimap.create();
    int wMax = 0;
    for (BufferedImage bufferedImage : images) {
        Set<Pair<TexturePoint, TexturePoint>> temp_lines = new HashSet<>();

        int w = bufferedImage.getWidth();
        int h = bufferedImage.getHeight();

        if (h > w)
            h = w;
        if (w > wMax)
            wMax = w;
        for (int hh = 0; hh < h; hh++)
            for (int ww = 0; ww < w; ww++) {
                int argb = bufferedImage.getRGB(ww, hh);
                float r = (argb >> 16 & 255) / 255f;
                float g = (argb >> 8 & 255) / 255f;
                float b = (argb & 255) / 255f;
                float intesity = (r + b + g) / 3f;
                int alpha = (argb >> 24) & 255;
                if (alpha > 0) {
                    boolean added = false;
                    //Check colour sets for similar colour to shade it later
                    TexturePoint tp = new TexturePoint(ww, hh, w);
                    if (!testSet.contains(tp)) {
                        for (Integer key : area.keySet()) {
                            for (TexturePoint p : area.get(key)) {
                                float mod = w / (float) p.scale;
                                int pColour = bufferedImage.getRGB((int) (p.x * mod), (int) (p.y * mod));
                                float dR = (r - (pColour >> 16 & 255) / 255f);
                                float dG = (g - (pColour >> 8 & 255) / 255f);
                                float dB = (b - (pColour & 255) / 255f);
                                double delta = Math.sqrt(dR * dR + dG * dG + dB * dB);
                                if (delta < .25) {
                                    area.put(key, tp);
                                    added = true;
                                    break;
                                }
                            }
                            if (added)
                                break;
                        }
                        if (!added)
                            area.put(argb, tp);
                        testSet.add(tp);
                    }
                    //Compare to direct neighbour
                    for (int i = 0; i < 4; i++) {
                        int xx = (i == 0 ? -1 : i == 1 ? 1 : 0);
                        int yy = (i == 2 ? -1 : i == 3 ? 1 : 0);
                        int u = ww + xx;
                        int v = hh + yy;

                        int neighbour = 0;
                        float delta = 1;
                        boolean notTransparent = false;
                        if (u >= 0 && u < w && v >= 0 && v < h) {
                            neighbour = bufferedImage.getRGB(u, v);
                            notTransparent = ((neighbour >> 24) & 255) > 0;
                            if (notTransparent) {
                                float neighbourIntesity = ((neighbour >> 16 & 255) + (neighbour >> 8 & 255)
                                        + (neighbour & 255)) / 765f;
                                float intesityDelta = Math.max(0,
                                        Math.min(1, Math.abs(intesity - neighbourIntesity)));
                                float rDelta = Math.max(0,
                                        Math.min(1, Math.abs(r - (neighbour >> 16 & 255) / 255f)));
                                float gDelta = Math.max(0,
                                        Math.min(1, Math.abs(g - (neighbour >> 8 & 255) / 255f)));
                                float bDelta = Math.max(0, Math.min(1, Math.abs(b - (neighbour & 255) / 255f)));
                                delta = Math.max(intesityDelta, Math.max(rDelta, Math.max(gDelta, bDelta)));
                                delta = delta < .25 ? 0 : delta > .4 ? 1 : delta;
                            }
                        }
                        if (delta > 0) {
                            Pair<TexturePoint, TexturePoint> l = Pair.of(
                                    new TexturePoint(ww + (i == 0 ? 0 : i == 1 ? 1 : 0),
                                            hh + (i == 2 ? 0 : i == 3 ? 1 : 0), w),
                                    new TexturePoint(ww + (i == 0 ? 0 : i == 1 ? 1 : 1),
                                            hh + (i == 2 ? 0 : i == 3 ? 1 : 1), w));
                            temp_lines.add(l);
                        }
                    }
                }
            }
        lines.addAll(temp_lines);
    }

    ArrayList<Integer> lumiSort = new ArrayList<>(area.keySet());
    Collections.sort(lumiSort, (rgb1, rgb2) -> Double.compare(getLuminance(rgb1), getLuminance(rgb2)));
    HashMultimap<ShadeStyle, Point> complete_areaMap = HashMultimap.create();
    int lineNumber = 2;
    int lineStyle = 0;
    for (Integer i : lumiSort) {
        complete_areaMap.putAll(new ShadeStyle(lineNumber, lineStyle), area.get(i));
        ++lineStyle;
        lineStyle %= 3;
        if (lineStyle == 0)
            lineNumber += 1;
    }

    Set<Pair<Point, Point>> complete_lines = new HashSet<>();
    for (Pair<TexturePoint, TexturePoint> line : lines) {
        TexturePoint p1 = line.getKey();
        TexturePoint p2 = line.getValue();
        complete_lines.add(Pair.of(
                new Point((int) (p1.x / (float) p1.scale * wMax), (int) (p1.y / (float) p1.scale * wMax)),
                new Point((int) (p2.x / (float) p2.scale * wMax), (int) (p2.y / (float) p2.scale * wMax))));
    }
    return new BlueprintLines(wMax, complete_lines, complete_areaMap);
}

From source file:mase.generic.WeightedClusterSCPostEval.java

@Override
protected void initializeClusters(EvolutionState state) {
    // Initialization should also be biased: the probability of a point being chosen
    // is proportional to its weight

    this.clusters = new double[numClusters][];
    this.counts = new int[numClusters];
    Integer[] list = new Integer[buffer.size()];
    buffer.keySet().toArray(list);
    HashSet<Integer> randomKeys = new HashSet<Integer>(numClusters * 2);
    HashMap<Integer, Double> pointWeight = stateCorrelations(state);
    double totalWeight = 0;
    for (Double d : pointWeight.values()) {
        totalWeight += d;
    }

    while (randomKeys.size() < numClusters) {
        int next = -1;
        double rand = state.random[0].nextDouble() * totalWeight;
        for (int i = 0; i < list.length; i++) {
            rand -= pointWeight.get(list[i]);
            if (rand <= 0.0) {
                next = list[i];
                break;
            }
        }
        if (!randomKeys.contains(next)) {
            randomKeys.add(next);
        }
    }
    int clusterIndex = 0;
    for (Integer key : randomKeys) {
        byte[] s = globalKey.get(key);
        double[] cl = new double[s.length];
        for (int i = 0; i < s.length; i++) {
            cl[i] = s[i];
        }
        clusters[clusterIndex++] = cl;
    }

}